Example #1
  def execute(self):
    # The 'shelled' option is only passed by this execute method and indicates a shelled run of pants.
    if not self.get_options().shelled:

      # This task is specialized to build just one target - the spindle source.
      targets = [self.spindle_target]
      # TODO: This invalidation is incomplete and should do the stuff done by the jvm_compile fingerprint
      # strategy. But since this task is scheduled to complete before the classpath is resolved, this is tricky.
      with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
        targets = invalidation_check.all_vts
        if len(targets) != 1:
          raise TaskError("There should only be one versioned target for the build_spindle task! "
                          "(was: {})".format(targets))
        vt = targets[0]
        invalid_vts_by_target = {vt.target: vt}
        if not vt.valid:
          args = ['--build-spindle-shelled', 'bundle', '--bundle-jvm-deployjar']
          args.append(self.get_options().spindle_codegen_binary)
          results = self.run_pants_no_lock(args, workunit_name='spindle-build')

          if results.returncode != 0:
            # Purposefully not returning a message so the error from the shelled run can be surfaced.
            raise TaskError()

          spindle_bundle = self.spindle_bundle_out
          safe_mkdir(vt.results_dir)
          spindle_binary = os.path.join(vt.results_dir, 'spindle-bundle.jar')
          try:
            shutil.copy(spindle_bundle, spindle_binary)
          except Exception as e:
            raise TaskError("Could not copy the spindle binary at {}:\n{}".format(spindle_bundle, e))

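          # Register the bundled jar as a 'spindle_binary' product so downstream tasks can locate
          # it under this versioned target's results_dir.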
        self.context.products.get('spindle_binary').add(vt.target, vt.results_dir).append('spindle-bundle.jar')
Example #2
  def _execute_codegen(self, targets):
    sources = self._calculate_sources(targets, lambda t: isinstance(t, SpindleThriftLibrary))
    scalate_workdir = os.path.join(self.workdir, 'scalate_workdir')
    safe_mkdir(self.namespace_out)
    # Spindle incorrectly caches state in its workdir, so delete those files on success or failure.
    safe_mkdir(scalate_workdir, clean=True)

    thrift_include = self.get_options().thrift_include
    if not thrift_include:
      raise self.BadDependency("You must pass the paths of your thrift roots as the '--thrift_include' option!")

    scala_template_address = os.path.join(self.scala_template.address.spec_path,
                                          self.scala_template.entry_point)
    java_template_address = os.path.join(self.java_template.address.spec_path,
                                         self.java_template.entry_point)
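    # Point spindle at the scalate template entry points, thrift include roots, and output/work
    # dirs, then append the thrift sources themselves.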
    spindle_args = [
      '--template', scala_template_address,
      '--java_template', java_template_address,
      '--thrift_include', ':'.join(thrift_include),
      '--namespace_out', self.namespace_out,
      '--working_dir', scalate_workdir,
    ]

    spindle_args.extend(sources)
    result = self._run_spindle(spindle_args)
    if result != 0:
      raise TaskError('Spindle codegen exited non-zero ({0})'.format(result))
Example #3
  def _fetch_artifacts(self, local_override_versions):
    """Download jars from maven repo into the artifact cache dir, then symlink them into our workdir."""

    products = self.context.products
    # Coordinate -> set(relative path to symlink of artifact in symlink farm)
    coord_to_artifact_symlinks = defaultdict(set)
    # Demanded by some downstream tasks
    products.safe_create_data('ivy_cache_dir', lambda: self.pom_cache_dir)
    coords = set(
      Coordinate(*t)
      for t in chain.from_iterable(self.target_to_maven_coordinate_closure.values())
    )
    artifacts_to_download = set()
    for coord in coords:
      for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
        # Sanity check. At this point, all artifacts mapped to a coord should be fully resolved, location included.
        if artifact.repo_url is None:
          raise Exception("Something went wrong! {} was mapped to an artifact {} with no "
                          "associated repo: ".format(coord, artifact))
        cached_artifact_path = os.path.join(self.pom_cache_dir, artifact.artifact_path)
        if not os.path.exists(cached_artifact_path):
          artifacts_to_download.add(artifact)
    self._download_artifacts(artifacts_to_download)

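    # Second pass: symlink each cached artifact into the symlink farm, preferring any local
    # override for its (groupId, artifactId, version).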
    ivy_symlink_map = self.context.products.get_data('ivy_resolve_symlink_map', dict)
    for coord in coords:
      for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
        local_override_key = (artifact.groupId, artifact.artifactId, artifact.version)
        if local_override_key not in local_override_versions:
          cached_artifact_path = os.path.realpath(os.path.join(self.pom_cache_dir, artifact.artifact_path))
        else:
          cached_artifact_path = os.path.realpath(local_override_versions[local_override_key])
          if not os.path.exists(cached_artifact_path):
            raise Exception('Local override for {} at {} does not exist.'.format(artifact, cached_artifact_path))

        symlinked_artifact_path = os.path.join(self.artifact_symlink_dir, artifact.artifact_path)
        safe_mkdir(os.path.dirname(symlinked_artifact_path))

        try:
          os.symlink(cached_artifact_path, symlinked_artifact_path)
        except OSError as e:
          if e.errno != errno.EEXIST:
            raise
          existing_symlink_target = os.readlink(symlinked_artifact_path)
          if existing_symlink_target != cached_artifact_path:
            raise Exception(
              'A symlink already exists for artifact {}, but it points to the wrong path.\n'
              'Symlink: {}\n'
              'Destination of existing symlink: {}\n'
              'Where this symlink should point: {}\n'
              .format(
                artifact,
                symlinked_artifact_path,
                existing_symlink_target,
                cached_artifact_path))
        ivy_symlink_map[cached_artifact_path] = symlinked_artifact_path
        coord_to_artifact_symlinks[artifact].add(symlinked_artifact_path)
    return coord_to_artifact_symlinks
Example #4
    def _fetch_source_jars(self, fetchers, symlink_dir):
        future_session = FuturesSession(max_workers=4)
        coords = set(
            Coordinate(*t) for t in chain.from_iterable(
                self.target_to_maven_coordinate_closure.values()))
        artifacts_to_symlink = set()
        artifacts_to_download = set()
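        # A source jar shares its binary artifact's coordinate, distinguished only by the
        # 'sources' classifier.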
        with self.context.new_workunit('find-source-jars'):
            for coord in coords:
                for artifact in self.maven_coordinate_to_provided_artifacts[
                        coord]:
                    source_jar = artifact._replace(classifier='sources')
                    cached_source_jar_path = os.path.join(
                        self.pom_cache_dir, source_jar.artifact_path)
                    already_downloaded = os.path.exists(cached_source_jar_path)
                    artifacts_to_symlink.add(artifact)
                    if not already_downloaded:
                        # TODO(mateo): This probably should be a ChainedFetcher method instead of iterating over fetchers here.
                        for fetcher in fetchers:
                            if fetcher.resource_exists(source_jar):
                                source_jar = source_jar._replace(
                                    repo_url=fetcher.repo)
                                artifacts_to_symlink.add(source_jar)
                                artifacts_to_download.add(source_jar)
                                break
                    else:
                        artifacts_to_symlink.add(source_jar)

        with self.context.new_workunit('download-source-jars'):
            self._download_artifacts(artifacts_to_download)
        with self.context.new_workunit('symlink-source-jars'):
            safe_mkdir(symlink_dir)
            for artifact in artifacts_to_symlink:
                cached_artifact_path = os.path.join(self.pom_cache_dir,
                                                    artifact.artifact_path)
                symlinked_artifact_path = os.path.join(
                    symlink_dir,
                    artifact.artifact_path.replace('/', '_'),
                )
                safe_mkdir(os.path.dirname(symlinked_artifact_path))
                try:
                    os.symlink(cached_artifact_path, symlinked_artifact_path)
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        stderr(
                            'Failed to link artifact {} to the symlink farm at {}'
                            .format(cached_artifact_path,
                                    symlinked_artifact_path))
                        raise
Example #5
 def _background_stream(self, artifact, session, response):
   response.raise_for_status()
   cached_artifact_path = os.path.join(self.pom_cache_dir, artifact.artifact_path)
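   # Stream into a uniquely named temp file so a partial download is never visible at the final path.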
   temp_path = '{}-{}'.format(cached_artifact_path, uuid4())
   safe_mkdir(os.path.dirname(cached_artifact_path))
   with open(temp_path, 'wb') as f:
     for chunk in response.iter_content(4096):
       f.write(chunk)
   if os.path.lexists(cached_artifact_path):
     if not file_contents_equal(temp_path, cached_artifact_path):
       raise Exception(
         'About to rename downloaded artifact {} from {} to {}, but the destination path'
         ' already exists and has different contents.'
         .format(artifact, temp_path, cached_artifact_path))
     # The cached artifact already matches the download, so discard the temp file.
     os.remove(temp_path)
   else:
     os.rename(temp_path, cached_artifact_path)
Example #6
 def _background_stream(self, artifact, session, response):
     response.raise_for_status()
     cached_artifact_path = os.path.join(self.pom_cache_dir,
                                         artifact.artifact_path)
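     # Stream into a uniquely named temp file so a partial download is never visible at the final path.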
     temp_path = '{}-{}'.format(cached_artifact_path, uuid4())
     safe_mkdir(os.path.dirname(cached_artifact_path))
     with open(temp_path, 'wb') as f:
         for chunk in response.iter_content(4096):
             f.write(chunk)
     if os.path.lexists(cached_artifact_path):
         if not file_contents_equal(temp_path, cached_artifact_path):
             raise Exception(
                 'About to rename downloaded artifact {} from {} to {}, but the destination path'
                 ' already exists and has different contents.'.format(
                     artifact, temp_path, cached_artifact_path))
         # The cached artifact already matches the download, so discard the temp file.
         os.remove(temp_path)
     else:
         os.rename(temp_path, cached_artifact_path)
Example #7
  def _fetch_source_jars(self, fetchers, symlink_dir):
    future_session = FuturesSession(max_workers=4)
    coords = set(
      Coordinate(*t)
      for t in chain.from_iterable(self.target_to_maven_coordinate_closure.values())
    )
    artifacts_to_symlink = set()
    artifacts_to_download = set()
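    # A source jar shares its binary artifact's coordinate, distinguished only by the
    # 'sources' classifier.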
    with self.context.new_workunit('find-source-jars'):
      for coord in coords:
        for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
          source_jar = artifact._replace(classifier='sources')
          cached_source_jar_path = os.path.join(self.pom_cache_dir, source_jar.artifact_path)
          already_downloaded = os.path.exists(cached_source_jar_path)
          artifacts_to_symlink.add(artifact)
          if not already_downloaded:
            # TODO(mateo): This probably should be a ChainedFetcher method instead of iterating over fetchers here.
            for fetcher in fetchers:
              if fetcher.resource_exists(source_jar):
                source_jar = source_jar._replace(repo_url=fetcher.repo)
                artifacts_to_symlink.add(source_jar)
                artifacts_to_download.add(source_jar)
                break
          else:
            artifacts_to_symlink.add(source_jar)

    with self.context.new_workunit('download-source-jars'):
      self._download_artifacts(artifacts_to_download)
    with self.context.new_workunit('symlink-source-jars'):
      safe_mkdir(symlink_dir)
      for artifact in artifacts_to_symlink:
        cached_artifact_path = os.path.join(self.pom_cache_dir, artifact.artifact_path)
        symlinked_artifact_path = os.path.join(
          symlink_dir,
          artifact.artifact_path.replace('/', '_'),
        )
        safe_mkdir(os.path.dirname(symlinked_artifact_path))
        try:
          os.symlink(cached_artifact_path, symlinked_artifact_path)
        except OSError as e:
          if e.errno != errno.EEXIST:
            stderr(
              'Failed to link artifact {} to the symlink farm at {}'
              .format(cached_artifact_path, symlinked_artifact_path))
            raise
Example #8
    def _fetch_artifacts(self, local_override_versions):
        """Download jars from maven repo into the artifact cache dir, then symlink them into our workdir."""

        products = self.context.products
        # Coordinate -> set(relative path to symlink of artifact in symlink farm)
        coord_to_artifact_symlinks = defaultdict(set)
        # Demanded by some downstream tasks
        products.safe_create_data('ivy_cache_dir', lambda: self.pom_cache_dir)
        coords = set(
            Coordinate(*t) for t in chain.from_iterable(
                self.target_to_maven_coordinate_closure.values()))
        artifacts_to_download = set()
        for coord in coords:
            for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
                # Sanity check. At this point, all artifacts mapped to a coord should be fully resolved, location included.
                if artifact.repo_url is None:
                    raise Exception(
                        "Something went wrong! {} was mapped to an artifact {} with no "
                        "associated repo.".format(coord, artifact))
                cached_artifact_path = os.path.join(self.pom_cache_dir,
                                                    artifact.artifact_path)
                if not os.path.exists(cached_artifact_path):
                    artifacts_to_download.add(artifact)
        self._download_artifacts(artifacts_to_download)

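        # Second pass: symlink each cached artifact into the symlink farm, preferring any local
        # override for its (groupId, artifactId, version).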
        ivy_symlink_map = self.context.products.get_data(
            'ivy_resolve_symlink_map', dict)
        for coord in coords:
            for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
                local_override_key = (artifact.groupId, artifact.artifactId,
                                      artifact.version)
                if local_override_key not in local_override_versions:
                    cached_artifact_path = os.path.realpath(
                        os.path.join(self.pom_cache_dir,
                                     artifact.artifact_path))
                else:
                    cached_artifact_path = os.path.realpath(
                        local_override_versions[local_override_key])
                    if not os.path.exists(cached_artifact_path):
                        raise Exception(
                            'Local override for {} at {} does not exist.'.
                            format(artifact, cached_artifact_path))

                symlinked_artifact_path = os.path.join(
                    self.artifact_symlink_dir, artifact.artifact_path)
                safe_mkdir(os.path.dirname(symlinked_artifact_path))

                try:
                    os.symlink(cached_artifact_path, symlinked_artifact_path)
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise
                    existing_symlink_target = os.readlink(
                        symlinked_artifact_path)
                    if existing_symlink_target != cached_artifact_path:
                        raise Exception(
                            'A symlink already exists for artifact {}, but it points to the wrong path.\n'
                            'Symlink: {}\n'
                            'Destination of existing symlink: {}\n'
                            'Where this symlink should point: {}\n'.format(
                                artifact, symlinked_artifact_path,
                                existing_symlink_target, cached_artifact_path))
                ivy_symlink_map[cached_artifact_path] = symlinked_artifact_path
                coord_to_artifact_symlinks[artifact].add(
                    symlinked_artifact_path)
        return coord_to_artifact_symlinks
Example #9
    def execute(self):

        # Pants no longer allows options to be tuples or sets, so we accept lists of dicts and
        # convert them into hashable structures here.

        # Pins converted to { (org, name): rev, ... }
        global_pinned_tuples = {}
        for pin in self.get_options().global_pinned_versions:
            artifact_tuple = (pin['org'], pin['name'])
            if artifact_tuple in global_pinned_tuples:
                raise Exception(
                    'An artifact has conflicting overrides!:\n{}:{} and\n'
                    '{}'.format(artifact_tuple, pin['rev'],
                                global_pinned_tuples[artifact_tuple]))
            global_pinned_tuples[artifact_tuple] = pin['rev']

        # Overrides converted to { (org, name, rev): /path/to/artifact, ... }
        override_tuples = {}
        for override in self.get_options().local_override_versions:
            override_tuples[(override['org'], override['name'],
                             override['rev'])] = override['artifact_path']

        # Exclusions converted to [(org, name), ...]
        global_exclusion_tuples = []
        for exclusion in self.get_options().global_exclusions:
            global_exclusion_tuples.append(
                (exclusion['org'], exclusion['name']))

        global_exclusions = frozenset(global_exclusion_tuples)
        global_pinned_versions = dict(global_pinned_tuples)
        local_override_versions = override_tuples
        fetchers = ChainedFetcher(self.get_options().maven_repos)

        invalidation_context_manager = self.invalidated(
            self.all_jar_libs,
            invalidate_dependents=False,
            fingerprint_strategy=PomResolveFingerprintStrategy(
                global_exclusions, global_pinned_versions),
        )

        with invalidation_context_manager as invalidation_check:
            # NOTE: In terms of caching this models IvyResolve in pants quite closely. We always
            # operate over and cache in terms of the global set of jar dependencies. Note that we override
            # `check_artifact_cache_for` in order to get the artifact cache to respect this.
            global_vts = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts)
            vts_workdir = os.path.join(self.workdir, global_vts.cache_key.hash)
            analysis_path = os.path.join(vts_workdir, 'analysis.pickle')
            if invalidation_check.invalid_vts or not os.path.exists(
                    analysis_path):
                with self.context.new_workunit('traverse-pom-graph'):
                    global_dep_graph, target_to_dep_graph = self.resolve_dependency_graphs(
                        self.all_jar_libs,
                        fetchers,
                        global_exclusions,
                        global_pinned_versions,
                    )
                    self.report_unused_pins_and_exclusions(
                        global_dep_graph,
                        global_pinned_versions,
                        global_exclusions,
                    )
                # TODO: Not super happy about using target.id really anywhere, since it's just a name.
                # But for now this is all completely invalidated whenever any part of 3rdparty:: changes.
                # It might however be possible that just renaming a JarLib (and doing nothing else) will
                # break this.
                for target, dep_graph in target_to_dep_graph.items():
                    self.target_to_maven_coordinate_closure[target.id] = list(
                        dep_graph.artifact_closure())
                copied_coord_to_artifacts = deepcopy(
                    global_dep_graph._coord_to_provided_artifacts)
                self.maven_coordinate_to_provided_artifacts.update(
                    copied_coord_to_artifacts)
                safe_mkdir(vts_workdir)
                # NOTE: These products are only used by pom-ivy-diff, which is only there for debugging.
                # It will probably go away within a few months, at which point these products optionally
                # can too.  But they might also be useful to future downstream tasks.
                analysis = {
                    'target_to_maven_coordinate_closure':
                    self.target_to_maven_coordinate_closure,
                    'maven_coordinate_to_provided_artifacts':
                    self.maven_coordinate_to_provided_artifacts,
                    'global_dep_graph': global_dep_graph,
                }
                with open(analysis_path, 'wb') as f:
                    pickle.dump(analysis, f)
                if self.artifact_cache_writes_enabled():
                    self.update_artifact_cache([(global_vts, [analysis_path])])
            else:
                with open(analysis_path, 'rb') as f:
                    analysis = pickle.load(f)
                self.target_to_maven_coordinate_closure.update(
                    analysis['target_to_maven_coordinate_closure'], )
                self.maven_coordinate_to_provided_artifacts.update(
                    analysis['maven_coordinate_to_provided_artifacts'], )
                global_dep_graph = analysis['global_dep_graph']

        self.report_for_artifacts(global_dep_graph)
        conflicted_deps = global_dep_graph.conflicted_dependencies()
        if conflicted_deps:
            self.report_conflicted_deps(
                conflicted_deps,
                global_dep_graph.reverse_unversioned_dep_graph(),
                global_dep_graph,
            )
            raise Exception(
                'PomResolve found {} conflicting dependencies.  These must be explicitly'
                ' pinned or excluded in order to generate a consistent global classpath.'
                ' See the output above for details, and try `./pants pom-resolve --help`'
                ' for information on flags to get more detailed reporting.'.
                format(len(conflicted_deps)))

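        # Flatten the per-target coordinate closures into the global set of artifacts that make
        # up the classpath.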
        all_artifacts = set()
        for coord_closure in self.target_to_maven_coordinate_closure.values():
            for coord in coord_closure:
                for artifact in self.maven_coordinate_to_provided_artifacts[
                        coord]:
                    all_artifacts.add(artifact)

        classpath_dump_file = self.get_options().dump_classpath_file
        if classpath_dump_file:
            with open(classpath_dump_file, 'wb') as f:
                f.write('FINGERPRINT: {}\n'.format(global_vts.cache_key.hash))
                for artifact in sorted(all_artifacts):
                    f.write('{}\n'.format(artifact))
            logger.info(
                'Dumped classpath file to {}'.format(classpath_dump_file))

        with self.context.new_workunit('fetch-artifacts'):
            coord_to_artifact_symlinks = self._fetch_artifacts(
                local_override_versions)

        if self.get_options().fetch_source_jars:
            with self.context.new_workunit('fetch-source-jars'):
                symlink_dir = os.path.join(
                    self.pom_cache_dir,
                    'source-jars-symlink-farms',
                    global_vts.cache_key.hash,
                )
                if not os.path.exists(symlink_dir):
                    self._fetch_source_jars(fetchers, symlink_dir)
                stderr('\nFetched source jars to {}'.format(symlink_dir))

        classpath_info_filename = self.get_options(
        ).write_classpath_info_to_file
        if classpath_info_filename:
            classpath_info = {
                'fingerprint':
                global_vts.cache_key.hash,
                'classpath': [{
                    'path':
                    os.path.join(self.pom_cache_dir, artifact.artifact_path),
                    'groupId':
                    artifact.groupId,
                    'artifactId':
                    artifact.artifactId,
                    'version':
                    artifact.version,
                    'packaging':
                    artifact.packaging,
                    'classifier':
                    artifact.classifier,
                } for artifact in all_artifacts],
            }
            with open(classpath_info_filename, 'w') as classpath_info_file:
                classpath_info_file.write(json.dumps(classpath_info))
            logger.info('Wrote classpath info JSON to {}.'.format(
                classpath_info_filename))

        with self.context.new_workunit('populate-compile-classpath'):
            self._populate_compile_classpath()
Example #10
  def execute(self):

    # Pants no longer allows options to be tuples or sets, so we accept lists of dicts and
    # convert them into hashable structures here.

    # Pins converted to { (org, name): rev, ... }
    global_pinned_tuples = {}
    for pin in self.get_options().global_pinned_versions:
      artifact_tuple = (pin['org'], pin['name'])
      if artifact_tuple in global_pinned_tuples:
        raise Exception('An artifact has conflicting overrides!:\n{}:{} and\n'
          '{}'.format(artifact_tuple, pin['rev'], global_pinned_tuples[artifact_tuple]))
      global_pinned_tuples[artifact_tuple] = pin['rev']

    # Overrides converted to { (org, name, rev): /path/to/artifact, ... }
    override_tuples = {}
    for override in self.get_options().local_override_versions:
      override_tuples[(override['org'], override['name'], override['rev'])] = override['artifact_path']

    # Exclusions converted to [(org, name), ...]
    global_exclusion_tuples = []
    for exclusion in self.get_options().global_exclusions:
      global_exclusion_tuples.append((exclusion['org'], exclusion['name']))

    global_exclusions = frozenset(global_exclusion_tuples)
    global_pinned_versions = dict(global_pinned_tuples)
    local_override_versions = override_tuples
    fetchers = ChainedFetcher(self.get_options().maven_repos)

    invalidation_context_manager = self.invalidated(
      self.all_jar_libs,
      invalidate_dependents=False,
      fingerprint_strategy=PomResolveFingerprintStrategy(global_exclusions, global_pinned_versions),
    )

    with invalidation_context_manager as invalidation_check:
      # NOTE: In terms of caching this models IvyResolve in pants quite closely. We always
      # operate over and cache in terms of the global set of jar dependencies. Note that we override
      # `check_artifact_cache_for` in order to get the artifact cache to respect this.
      global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
      vts_workdir = os.path.join(self.workdir, global_vts.cache_key.hash)
      analysis_path = os.path.join(vts_workdir, 'analysis.pickle')
      if invalidation_check.invalid_vts or not os.path.exists(analysis_path):
        with self.context.new_workunit('traverse-pom-graph'):
          global_dep_graph, target_to_dep_graph = self.resolve_dependency_graphs(
            self.all_jar_libs,
            fetchers,
            global_exclusions,
            global_pinned_versions,
          )
          self.report_unused_pins_and_exclusions(
            global_dep_graph,
            global_pinned_versions,
            global_exclusions,
          )
        # TODO: Not super happy about using target.id really anywhere, since it's just a name.
        # But for now this is all completely invalidated whenever any part of 3rdparty:: changes.
        # It might however be possible that just renaming a JarLib (and doing nothing else) will
        # break this.
        for target, dep_graph in target_to_dep_graph.items():
          self.target_to_maven_coordinate_closure[target.id] = list(dep_graph.artifact_closure())
        copied_coord_to_artifacts = deepcopy(global_dep_graph._coord_to_provided_artifacts)
        self.maven_coordinate_to_provided_artifacts.update(copied_coord_to_artifacts)
        safe_mkdir(vts_workdir)
        # NOTE: These products are only used by pom-ivy-diff, which is only there for debugging.
        # It will probably go away within a few months, at which point these products optionally
        # can too.  But they might also be useful to future downstream tasks.
        analysis = {
          'target_to_maven_coordinate_closure': self.target_to_maven_coordinate_closure,
          'maven_coordinate_to_provided_artifacts': self.maven_coordinate_to_provided_artifacts,
          'global_dep_graph': global_dep_graph,
        }
        with open(analysis_path, 'wb') as f:
          pickle.dump(analysis, f)
        if self.artifact_cache_writes_enabled():
          self.update_artifact_cache([(global_vts, [analysis_path])])
      else:
        with open(analysis_path, 'rb') as f:
          analysis = pickle.load(f)
        self.target_to_maven_coordinate_closure.update(
          analysis['target_to_maven_coordinate_closure'],
        )
        self.maven_coordinate_to_provided_artifacts.update(
          analysis['maven_coordinate_to_provided_artifacts'],
        )
        global_dep_graph = analysis['global_dep_graph']

    self.report_for_artifacts(global_dep_graph)
    conflicted_deps = global_dep_graph.conflicted_dependencies()
    if conflicted_deps:
      self.report_conflicted_deps(
        conflicted_deps,
        global_dep_graph.reverse_unversioned_dep_graph(),
        global_dep_graph,
      )
      raise Exception(
        'PomResolve found {} conflicting dependencies.  These must be explicitly'
        ' pinned or excluded in order to generate a consistent global classpath.'
        ' See the output above for details, and try `./pants pom-resolve --help`'
        ' for information on flags to get more detailed reporting.'
        .format(len(conflicted_deps)))

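    # Flatten the per-target coordinate closures into the global set of artifacts that make up
    # the classpath.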
    all_artifacts = set()
    for coord_closure in self.target_to_maven_coordinate_closure.values():
      for coord in coord_closure:
        for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
          all_artifacts.add(artifact)

    classpath_dump_file = self.get_options().dump_classpath_file
    if classpath_dump_file:
      with open(classpath_dump_file, 'wb') as f:
        f.write('FINGERPRINT: {}\n'.format(global_vts.cache_key.hash))
        for artifact in sorted(all_artifacts):
          f.write('{}\n'.format(artifact))
      logger.info('Dumped classpath file to {}'.format(classpath_dump_file))

    with self.context.new_workunit('fetch-artifacts'):
      coord_to_artifact_symlinks = self._fetch_artifacts(local_override_versions)

    if self.get_options().fetch_source_jars:
      with self.context.new_workunit('fetch-source-jars'):
        symlink_dir = os.path.join(
          self.pom_cache_dir,
          'source-jars-symlink-farms',
          global_vts.cache_key.hash,
        )
        if not os.path.exists(symlink_dir):
          self._fetch_source_jars(fetchers, symlink_dir)
        stderr('\nFetched source jars to {}'.format(symlink_dir))

    classpath_info_filename = self.get_options().write_classpath_info_to_file
    if classpath_info_filename:
      classpath_info = {
        'fingerprint': global_vts.cache_key.hash,
        'classpath': [
           {
             'path': os.path.join(self.pom_cache_dir, artifact.artifact_path),
             'groupId': artifact.groupId,
             'artifactId': artifact.artifactId,
             'version': artifact.version,
             'packaging': artifact.packaging,
             'classifier': artifact.classifier,
           }
           for artifact in all_artifacts
        ],
      }
      with open(classpath_info_filename, 'w') as classpath_info_file:
        classpath_info_file.write(json.dumps(classpath_info))
      logger.info('Wrote classpath info JSON to {}.'.format(classpath_info_filename))

    with self.context.new_workunit('populate-compile-classpath'):
      self._populate_compile_classpath()