Example #1
  def execute(self):
    binaries = self.context.targets(self.is_binary)

    # Check for duplicate binary names, since we write the pexes to <dist>/<name>.pex.
    names = {}
    for binary in binaries:
      name = binary.name
      if name in names:
        raise TaskError('Cannot build two binaries with the same name in a single invocation. '
                        '{} and {} both have the name {}.'.format(binary, names[name], name))
      names[name] = binary

    with self.invalidated(binaries, invalidate_dependents=True) as invalidation_check:
      python_deployable_archive = self.context.products.get('deployable_archives')
      python_pex_product = self.context.products.get('pex_archives')
      for vt in invalidation_check.all_vts:
        pex_path = os.path.join(vt.results_dir, '{}.pex'.format(vt.target.name))
        if not vt.valid:
          self.context.log.debug('cache for {} is invalid, rebuilding'.format(vt.target))
          self._create_binary(vt.target, vt.results_dir)
        else:
          self.context.log.debug('using cache for {}'.format(vt.target))

        basename = os.path.basename(pex_path)
        python_pex_product.add(vt.target, os.path.dirname(pex_path)).append(basename)
        python_deployable_archive.add(vt.target, os.path.dirname(pex_path)).append(basename)
        self.context.log.debug('created {}'.format(os.path.relpath(pex_path, get_buildroot())))

        # Create a copy for pex.
        pex_copy = os.path.join(self._distdir, os.path.basename(pex_path))
        safe_mkdir_for(pex_copy)
        atomic_copy(pex_path, pex_copy)
        self.context.log.info('created pex {}'.format(os.path.relpath(pex_copy, get_buildroot())))
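
A note on the final copy into dist/: atomic_copy is used so a concurrent reader never observes a half-written pex. A minimal standalone sketch of that behavior (assuming the helper copies via a sibling temp file plus rename; the name atomic_copy_sketch is ours, not the pants helper):

import os
import shutil
import tempfile

def atomic_copy_sketch(src, dst):
  """Copy src to dst via a temp file in the destination dir, then rename into place."""
  fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(dst))
  os.close(fd)
  try:
    shutil.copy(src, tmp_path)
    os.rename(tmp_path, dst)  # atomic on POSIX when both paths share a filesystem
  finally:
    if os.path.exists(tmp_path):
      os.unlink(tmp_path)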
Example #2
  def console_output(self, _):
    buildfiles = OrderedSet()
    if self._dependees_type:
      base_paths = OrderedSet()
      for dependees_type in self._dependees_type:
        # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
        # machinery
        try:
          # Try a fully qualified import first, for filtering on custom types.
          from_list, module, type_name = dependees_type.rsplit('.', 2)
          module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
          target_type = getattr(module, type_name)
        except (ImportError, ValueError):
          # Fall back on pants provided target types.
          registered_aliases = self.context.build_file_parser.registered_aliases()
          if dependees_type not in registered_aliases.targets:
            raise TaskError('Invalid type name: %s' % dependees_type)
          target_type = registered_aliases.targets[dependees_type]

        # Try to find the SourceRoot for the given input type
        try:
          roots = SourceRoot.roots(target_type)
          base_paths.update(roots)
        except KeyError:
          pass

      if not base_paths:
        raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                        '\nPlease define a source root in a BUILD file as:' +
                        '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
      for base_path in base_paths:
        buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                    os.path.join(get_buildroot(), base_path)))
    else:
      buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser

    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
      build_file_parser.parse_build_file(build_file)
      for address in build_file_parser.addresses_by_build_file[build_file]:
        build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
      for address in build_file_parser.addresses_by_build_file[build_file]:
        target = build_graph.get_target(address)
        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        for dependency in target.dependencies:
          dependency = self.get_concrete_target(dependency)
          dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
      for root in roots:
        yield root.address.spec

    for dependant in self.get_dependants(dependees_by_target, roots):
      yield dependant.address.spec
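
The fully qualified import branch above can be exercised standalone; the type name below is a stand-in from the stdlib, not a real pants target type:

# Resolve 'package.module.ClassName' to a class object, as the try-block does.
from_list, module, type_name = 'logging.handlers.RotatingFileHandler'.rsplit('.', 2)
mod = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
target_type = getattr(mod, type_name)
print(target_type)  # <class 'logging.handlers.RotatingFileHandler'>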
Example #3
  def __call__(self, *args, **kwargs):
    root = os.path.join(get_buildroot(), self.rel_path)

    excludes = kwargs.pop('exclude', [])
    if isinstance(excludes, string_types):
      raise ValueError("Expected exclude parameter to be a list of globs, lists, or strings, "
                       "not a single string")

    for i, exclude in enumerate(excludes):
      if isinstance(exclude, string_types):
        # You can't subtract raw strings from globs
        excludes[i] = [exclude]

    for glob in args:
      if self._is_glob_dir_outside_root(glob, root):
        raise ValueError('Invalid glob {}, points outside BUILD file root dir {}'.format(glob, root))

    result = self.wrapped_fn(root=root, *args, **kwargs)

    for exclude in excludes:
      result -= exclude

    buildroot = get_buildroot()
    rel_root = os.path.relpath(root, buildroot)
    filespec = self.to_filespec(args, root=rel_root, excludes=excludes)
    return FilesetWithSpec(rel_root, result, filespec)
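
A hypothetical BUILD-file usage of this wrapper; globs() with an exclude list is the pattern the code above supports, though the target and file names are invented:

python_library(
  name='lib',
  sources=globs('*.py', exclude=[globs('*_test.py'), 'conftest.py']),
)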
Example #4
  def _compile(self, target, source):
    """Compile given source to an object file."""
    obj = self._objpath(target, source)
    safe_mkdir(os.path.dirname(obj))

    abs_source = os.path.join(get_buildroot(), source)

    # TODO: include dir should include dependent work dir when headers are copied there.
    include_dirs = []
    for dep in target.dependencies:
      if self.is_library(dep):
        include_dirs.append(os.path.join(get_buildroot(), dep.target_base))

    cmd = [self.cpp_toolchain.compiler]
    cmd.append('-c')
    cmd.extend(('-I{0}'.format(i) for i in include_dirs))
    cmd.extend(['-o' + obj, abs_source])
    if self.get_options().cc_options is not None:
      cmd.extend([self.get_options().cc_options])

    # TODO: submit_async_work with self.run_command, [(cmd)] as a Work object.
    with self.context.new_workunit(name='cpp-compile', labels=[WorkUnit.COMPILER]) as workunit:
      self.run_command(cmd, workunit)

    self.context.log.info('Built c++ object: {0}'.format(obj))
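
A standalone worked illustration of the command list assembled above, with a hypothetical g++ toolchain and invented paths:

include_dirs = ['/repo/src/cpp/deps']
cmd = ['g++', '-c']
cmd.extend('-I{0}'.format(i) for i in include_dirs)
cmd.extend(['-o' + '/workdir/foo/foo.o', '/repo/src/cpp/foo/foo.cpp'])
print(' '.join(cmd))
# g++ -c -I/repo/src/cpp/deps -o/workdir/foo/foo.o /repo/src/cpp/foo/foo.cpp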
Example #5
  def mock_buildroot(self, dirs_to_copy=None):
    """Construct a mock buildroot and return a helper object for interacting with it."""
    Manager = namedtuple('Manager', 'write_file pushd dir')
    # N.B. BUILD.tools, contrib, and 3rdparty need to be copied rather than symlinked
    # to avoid the symlink prefix check error in the v1 and v2 engines.
    files_to_copy = ('BUILD.tools',)
    files_to_link = ('pants', 'pants.ini', 'pants.travis-ci.ini', '.pants.d',
                     'build-support', 'pants-plugins', 'src')
    dirs_to_copy = ('contrib', '3rdparty') + tuple(dirs_to_copy or [])

    with self.temporary_workdir() as tmp_dir:
      for filename in files_to_copy:
        shutil.copy(os.path.join(get_buildroot(), filename), os.path.join(tmp_dir, filename))

      for dirname in dirs_to_copy:
        shutil.copytree(os.path.join(get_buildroot(), dirname), os.path.join(tmp_dir, dirname))

      for filename in files_to_link:
        os.symlink(os.path.join(get_buildroot(), filename), os.path.join(tmp_dir, filename))

      def write_file(file_path, contents):
        full_file_path = os.path.join(tmp_dir, *file_path.split(os.sep))
        safe_mkdir_for(full_file_path)
        with open(full_file_path, 'wb') as fh:
          fh.write(contents)

      @contextmanager
      def dir_context():
        with pushd(tmp_dir):
          yield

      yield Manager(write_file, dir_context, tmp_dir)
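
A hypothetical use of the yielded Manager inside a test, exercising the write_file/pushd/dir fields declared above (directory and file names are invented):

with self.mock_buildroot(dirs_to_copy=['examples']) as buildroot:
  buildroot.write_file('examples/hello.txt', b'hello')
  with buildroot.pushd():
    pass  # run pants (or other commands) from inside the mock buildroot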
Example #6
 def do_check(path):
   """Check to see that the project contains the expected source folders."""
   found_source_content = False
   iml_file = os.path.join(path, 'project.iml')
   self.assertTrue(os.path.exists(iml_file))
   dom = minidom.parse(iml_file)
   expected_paths = ["file://" + os.path.join(get_buildroot(), _path) for _path in [
     'examples/src/java/org/pantsbuild/example/hello',
     'examples/src/java/org/pantsbuild/example/hello/greet',
     'examples/src/java/org/pantsbuild/example/hello/main',
     'examples/src/java/org/pantsbuild/example/hello/simple',
     'examples/src/resources/org/pantsbuild/example/hello',
   ]]
   expected_java_resource = ["file://" + os.path.join(get_buildroot(), _path) for _path in [
     'examples/src/resources/org/pantsbuild/example/hello',
   ]]
   remaining = set(expected_paths)
   for sourceFolder in self._get_sourceFolders(dom):
     found_source_content = True
     self.assertEqual("False", sourceFolder.getAttribute('isTestSource'))
     url = sourceFolder.getAttribute('url')
     # Check that the resource attribute is set correctly.
     if url in expected_java_resource:
       self.assertEqual(sourceFolder.getAttribute('type'), IdeaIntegrationTest.RESOURCE,
                        msg="Type {a_type} does not match expected type {e_type} "
                            "for {url}".format(e_type=IdeaIntegrationTest.RESOURCE, url=url,
                                               a_type=sourceFolder.getAttribute('type')))
     self.assertIn(url, remaining,
                    msg="Couldn't find url={url} in {expected}".format(url=url,
                                                                       expected=expected_paths))
     remaining.remove(url)
   self.assertTrue(found_source_content)
Example #7
  def _make_compile_request(self, versioned_target):
    target = versioned_target.target

    include_dirs = []
    for dep in self.native_deps(target):
      source_lib_base_dir = os.path.join(get_buildroot(),
                                         dep._sources_field.rel_path)
      include_dirs.append(source_lib_base_dir)
    for ext_dep in self.packaged_native_deps(target):
      external_lib_include_dir = os.path.join(get_buildroot(),
                                              ext_dep._sources_field.rel_path,
                                              ext_dep.include_relpath)
      self.context.log.debug('ext_dep: {}, external_lib_include_dir: {}'
                             .format(ext_dep, external_lib_include_dir))
      include_dirs.append(external_lib_include_dir)

    sources_and_headers = self.get_sources_headers_for_target(target)
    compiler_option_sets = (self._compile_settings.native_build_step
                                .get_compiler_option_sets_for_target(target))
    self.context.log.debug('target: {}, compiler_option_sets: {}'.format(target, compiler_option_sets))

    compile_request = NativeCompileRequest(
      compiler=self._compiler(target),
      include_dirs=include_dirs,
      sources=sources_and_headers,
      compiler_options=(self._compile_settings
                            .native_build_step
                            .get_merged_args_for_compiler_option_sets(compiler_option_sets)),
      output_dir=versioned_target.results_dir,
      header_file_extensions=self._compile_settings.header_file_extensions)

    self.context.log.debug(repr(compile_request))

    return compile_request
Example #8
  def create_parser(defaults=None):
    """Creates a config parser that supports %([key-name])s value substitution.

    Any defaults supplied will act as if specified in the loaded config file's DEFAULT section and
    be available for substitutions.

    All of the following are seeded with defaults in the config:
      user: the current user
      homedir: the current user's home directory
      buildroot: the root of this repo
      pants_bootstrapdir: the global pants scratch space primarily used for caches
      pants_supportdir: pants support files for this repo go here; for example: ivysettings.xml
      pants_distdir: user visible artifacts for this repo go here
      pants_workdir: the scratch space used for live builds in this repo
    """
    standard_defaults = dict(
      buildroot=get_buildroot(),
      homedir=os.path.expanduser('~'),
      user=getpass.getuser(),
      pants_bootstrapdir=os.path.expanduser('~/.pants.d'),
      pants_workdir=os.path.join(get_buildroot(), '.pants.d'),
      pants_supportdir=os.path.join(get_buildroot(), 'build-support'),
      pants_distdir=os.path.join(get_buildroot(), 'dist')
    )
    if defaults:
      standard_defaults.update(defaults)
    return ConfigParser.SafeConfigParser(standard_defaults)
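
A standalone demonstration of the substitution this enables, using the Python 2 stdlib directly and a made-up buildroot value:

import ConfigParser
import StringIO

parser = ConfigParser.SafeConfigParser({'buildroot': '/repo'})
parser.readfp(StringIO.StringIO('[DEFAULT]\ncachedir: %(buildroot)s/.cache\n'))
print(parser.get('DEFAULT', 'cachedir'))  # /repo/.cache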
Example #9
  def _capture_sources(self, targets_and_dirs):
    to_capture = []
    results_dirs = []
    filespecs = []

    for target, synthetic_target_dir in targets_and_dirs:
      files = self.sources_globs

      results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot())
      buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files)
      buildroot_relative_excludes = tuple(
        os.path.join(results_dir_relpath, file)
          for file in self.sources_exclude_globs
      )
      to_capture.append(
        PathGlobsAndRoot(
          PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
          text_type(get_buildroot()),
        )
      )
      results_dirs.append(results_dir_relpath)
      filespecs.append(FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

    snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture))

    return tuple(EagerFilesetWithSpec(
      results_dir_relpath,
      filespec,
      snapshot,
    ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots))
Example #10
  def __init__(self, *args, **kwargs):
    super(BaseZincCompile, self).__init__(*args, **kwargs)
    # A directory to contain per-target subdirectories with apt processor info files.
    self._processor_info_dir = os.path.join(self.workdir, 'apt-processor-info')

    # Validate zinc options.
    ZincCompile.validate_arguments(self.context.log, self.get_options().whitelisted_args,
                                   self._args)
    if self.execution_strategy == self.HERMETIC:
      # TODO: Make incremental compiles work hermetically. See:
      # https://github.com/pantsbuild/pants/issues/6517
      if self.get_options().incremental:
        raise TaskError("Hermetic zinc execution does not currently support incremental compiles. "
                        "Please use --no-compile-zinc-incremental.")
      try:
        fast_relpath(self.get_options().pants_workdir, get_buildroot())
      except ValueError:
        raise TaskError(
          "Hermetic zinc execution currently requires the workdir to be a child of the buildroot "
          "but workdir was {} and buildroot was {}".format(
            self.get_options().pants_workdir,
            get_buildroot(),
          )
        )

      if self.get_options().use_classpath_jars:
        # TODO: Make this work by capturing the correct Digest and passing them around the
        # right places.
        # See https://github.com/pantsbuild/pants/issues/6432
        raise TaskError("Hermetic zinc execution currently doesn't work with classpath jars")
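
The workdir check above leans on fast_relpath raising ValueError for paths outside the given root. A small illustration with invented paths:

from pants.util.dirutil import fast_relpath

print(fast_relpath('/repo/.pants.d', '/repo'))  # .pants.d
try:
  fast_relpath('/elsewhere/workdir', '/repo')
except ValueError:
  print('workdir must live under the buildroot')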
Example #11
  def __call__(self, rel_path=None, mapper=None, relative_to=None, fileset=None):
    """
    :param rel_path: Base path of the "source" file paths. By default, path of the
      BUILD file. Useful for assets that don't live in the source code repo.
    :param mapper: Function that takes a path string and returns a path string. Takes a path in
      the source tree, returns a path to use in the resulting bundle. By default, an identity
      mapper.
    :param string relative_to: Set up a simple mapping from source path to bundle path.
      E.g., ``relative_to='common'`` removes that prefix from all files in the application bundle.
    :param fileset: The set of files to include in the bundle. A string filename, a list of
      filenames, or a Fileset object (e.g. globs()).
    """
    if mapper and relative_to:
      raise ValueError("Must specify at most one of 'mapper' or 'relative_to'")

    rel_path = rel_path or self._rel_path
    filemap = {}

    if relative_to:
      base = os.path.join(get_buildroot(), rel_path, relative_to)
      mapper = RelativeToMapper(base)
    else:
      mapper = mapper or RelativeToMapper(os.path.join(get_buildroot(), rel_path))

    if fileset is not None:
      paths = fileset() if isinstance(fileset, Fileset) \
        else fileset if hasattr(fileset, '__iter__') \
        else [fileset]
      for path in paths:
        abspath = path
        if not os.path.isabs(abspath):
          abspath = os.path.join(get_buildroot(), rel_path, path)
        filemap[abspath] = mapper(abspath)

    return BundleProps(self._rel_path, mapper, filemap, fileset)
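
A hypothetical BUILD usage: with relative_to='common', a file at common/config/prod.ini lands in the bundle as config/prod.ini (target and file names are invented):

jvm_app(
  name='app',
  binary=':bin',
  bundles=[
    bundle(relative_to='common', fileset=globs('common/config/*.ini')),
  ],
)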
Example #12
def get_project_tree(options):
  """Creates the project tree for build files for use in a given pants run."""
  pants_ignore = options.pants_ignore or []
  if options.build_file_rev:
    return ScmProjectTree(get_buildroot(), get_scm(), options.build_file_rev, pants_ignore)
  else:
    return FileSystemProjectTree(get_buildroot(), pants_ignore)
Example #13
  def _do_check(self, project_dir_path, expected_project_path, expected_targets):
    """Check to see that the project contains the expected source folders."""

    iws_file = os.path.join(project_dir_path, 'project.iws')
    self.assertTrue(os.path.exists(iws_file))
    dom = minidom.parse(iws_file)
    self.assertEqual(1, len(dom.getElementsByTagName("project")))
    project = dom.getElementsByTagName("project")[0]

    self.assertEqual(1, len(project.getElementsByTagName('component')))
    component = project.getElementsByTagName('component')[0]

    actual_properties = component.getElementsByTagName('property')
    # 3 properties: targets, project_path, pants_idea_plugin_version
    self.assertEqual(3, len(actual_properties))

    self.assertEqual('targets', actual_properties[0].getAttribute('name'))
    actual_targets = json.loads(actual_properties[0].getAttribute('value'))
    abs_expected_target_specs = [os.path.join(get_buildroot(), relative_spec) for relative_spec in expected_targets]
    self.assertEqual(abs_expected_target_specs, actual_targets)

    self.assertEqual('project_path', actual_properties[1].getAttribute('name'))
    actual_project_path = actual_properties[1].getAttribute('value')
    self.assertEqual(os.path.join(get_buildroot(), expected_project_path), actual_project_path)

    self.assertEqual('pants_idea_plugin_version', actual_properties[2].getAttribute('name'))
    self.assertEqual('0.0.1', actual_properties[2].getAttribute('value'))
Example #14
  def test_analysis_portability(self):
    # Tests that analysis can be relocated between workdirs and still result in incremental
    # compile.
    with temporary_dir() as cache_dir, temporary_dir(root_dir=get_buildroot()) as src_dir, \
      temporary_dir(root_dir=get_buildroot(), suffix='.pants.d') as workdir:
      config = {
        'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
      }

      dep_src_file = os.path.join(src_dir, 'org', 'pantsbuild', 'dep', 'A.scala')
      dep_build_file = os.path.join(src_dir, 'org', 'pantsbuild', 'dep', 'BUILD')
      con_src_file = os.path.join(src_dir, 'org', 'pantsbuild', 'consumer', 'B.scala')
      con_build_file = os.path.join(src_dir, 'org', 'pantsbuild', 'consumer', 'BUILD')

      dep_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild', 'dep')
      con_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild', 'consumer')

      dep_src = "package org.pantsbuild.dep; class A {}"

      self.create_file(dep_src_file, dep_src)
      self.create_file(dep_build_file, "scala_library()")
      self.create_file(con_src_file, dedent(
        """package org.pantsbuild.consumer
           import org.pantsbuild.dep.A
           class B { def mkA: A = new A() }"""))
      self.create_file(con_build_file, "scala_library(dependencies=['{}'])".format(dep_spec))

      rel_workdir = fast_relpath(workdir, get_buildroot())
      rel_src_dir = fast_relpath(src_dir, get_buildroot())
      with self.mock_buildroot(dirs_to_copy=[rel_src_dir, rel_workdir]) as buildroot, \
        buildroot.pushd():
        # 1) Compile in one buildroot.
        self.run_compile(con_spec, config, os.path.join(buildroot.new_buildroot, rel_workdir))

      with self.mock_buildroot(dirs_to_copy=[rel_src_dir, rel_workdir]) as buildroot, \
        buildroot.pushd():
        # 2) Compile in another buildroot, and check that we hit the cache.
        new_workdir = os.path.join(buildroot.new_buildroot, rel_workdir)
        run_two = self.run_compile(con_spec, config, new_workdir)
        self.assertTrue(
            re.search(
              r"\[zinc\][^[]*\[cache\][^[]*Using cached artifacts for 2 targets.",
              run_two.stdout_data),
            run_two.stdout_data)

        # 3) Edit the dependency in a way that should trigger an incremental
        #    compile of the consumer.
        mocked_dep_src_file = os.path.join(
          buildroot.new_buildroot,
          fast_relpath(dep_src_file, get_buildroot()))
        self.create_file(mocked_dep_src_file, dep_src + "; /* this is a comment */")

        # 4) Compile and confirm that the analysis fetched from the cache in
        #    step 2 causes incrementalism: ie, zinc does not report compiling any files.
        run_three = self.run_compile(con_spec, config, new_workdir)
        self.assertTrue(
            re.search(
              r"/org/pantsbuild/consumer:consumer\)[^[]*\[compile\][^[]*\[zinc\]\W*\[info\] Compile success",
              run_three.stdout_data),
            run_three.stdout_data)
Example #15
 def configure_python(self, source_roots, test_roots, lib_roots):
   self.py_sources.extend(SourceSet(get_buildroot(), root, None, False) for root in source_roots)
   self.py_sources.extend(SourceSet(get_buildroot(), root, None, True) for root in test_roots)
   for root in lib_roots:
     for path in os.listdir(os.path.join(get_buildroot(), root)):
       if os.path.isdir(os.path.join(get_buildroot(), root, path)) or path.endswith('.egg'):
         self.py_libs.append(SourceSet(get_buildroot(), root, path, False))
Example #16
  def execute_codegen(self, target, target_workdir):
    target_cmd = [self._protoc]

    protoc_gen_go = ProtocGenGo.global_instance().select(self.context)
    env = os.environ.copy()
    env['PATH'] = ':'.join([os.path.dirname(protoc_gen_go), env['PATH']])

    bases = OrderedSet(tgt.target_base for tgt in target.closure() if self.is_gentarget(tgt))
    for base in bases:
      target_cmd.append('-I={}'.format(os.path.join(get_buildroot(), base)))

    outdir = os.path.join(target_workdir, 'src', 'go')
    safe_mkdir(outdir)
    target_cmd.append('--go_out={}'.format(outdir))

    all_sources = list(target.sources_relative_to_buildroot())
    for source in all_sources:
      file_cmd = target_cmd + [os.path.join(get_buildroot(), source)]
      with self.context.new_workunit(name=source,
                                     labels=[WorkUnitLabel.TOOL],
                                     cmd=' '.join(file_cmd)) as workunit:
        self.context.log.info(' '.join(file_cmd))
        result = subprocess.call(file_cmd,
                                 env=env,
                                 stdout=workunit.output('stdout'),
                                 stderr=workunit.output('stderr'))
        if result != 0:
          raise TaskError('{} ... exited non-zero ({})'.format(self._protoc, result))
Example #17
  def test_file_have_coding_utf8(self):
    """
    Look through all .py files and ensure they start with the line '# coding=utf-8'.
    """
    build_file_parser = BuildFileParser(get_buildroot())
    load_backends_from_source(build_file_parser)

    def has_hand_coded_python_files(tgt):
      return (not tgt.is_synthetic) and tgt.is_original and tgt.has_sources('.py')

    nonconforming_files = []
    for target in build_file_parser.scan().targets(has_hand_coded_python_files):
      for src in target.sources_relative_to_buildroot():
        with open(os.path.join(get_buildroot(), src), 'r') as python_file:
          coding_line = python_file.readline()
          if '' == coding_line and os.path.basename(src) == '__init__.py':
            continue
          if coding_line[0:2] == '#!':
            # Executable file:  look for the coding on the second line.
            coding_line = python_file.readline()
          if not coding_line.rstrip() == '# coding=utf-8':
            nonconforming_files.append(src)

    if nonconforming_files:
      self.fail('Expected these files to contain first line "# coding=utf-8": '
                + str(nonconforming_files))
Example #18
def reset_default_bootstrap_option_values(defaults, values=None):
  """Reset the bootstrap options' default values.

  :param defaults: The dict to set the values on.
  :param values: A namespace containing the values to set. If unspecified, uses hard-coded defaults.

  The bootstrapping code will use this to set the bootstrapped values. Code that doesn't trigger
  bootstrapping (i.e., the one remaining old-style command) will get the hard-coded defaults, as
  it did before.

  It's a code smell to update nominally static data dynamically, but this is temporary,
  and saves us having to plumb things through to all the Config.from_cache() call sites.

  This method is also called in tests of this code, to reset state for unrelated tests.

  TODO: Remove after all direct config reads have been subsumed into the options system,
        which can pass these into Config.load() itself after bootstrapping them.
  """

  if values:
    defaults.update({
      'pants_workdir': values.pants_workdir,
      'pants_supportdir': values.pants_supportdir,
      'pants_distdir': values.pants_distdir
    })
  else:
    defaults.update({
      'pants_workdir': os.path.join(get_buildroot(), '.pants.d'),
      'pants_supportdir': os.path.join(get_buildroot(), 'build-support'),
      'pants_distdir': os.path.join(get_buildroot(), 'dist')
    })
Example #19
  def __init__(self, parse_context, rel_path=None, mapper=None, relative_to=None, fileset=None):
    """
    :param rel_path: Base path of the "source" file paths. By default, path of the
      BUILD file. Useful for assets that don't live in the source code repo.
    :param mapper: Function that takes a path string and returns a path string. Takes a path in
      the source tree, returns a path to use in the resulting bundle. By default, an identity
      mapper.
    :param string relative_to: Set up a simple mapping from source path to bundle path.
      E.g., ``relative_to='common'`` removes that prefix from all files in the application bundle.
    :param fileset: The set of files to include in the bundle. A string filename, a list of
      filenames, or a Fileset object (e.g. globs()).
    """
    if mapper and relative_to:
      raise ValueError("Must specify at most one of 'mapper' or 'relative_to'")

    self._rel_path = rel_path or parse_context.rel_path
    self.filemap = {}

    if relative_to:
      base = os.path.join(get_buildroot(), self._rel_path, relative_to)
      self.mapper = RelativeToMapper(base)
    else:
      self.mapper = mapper or RelativeToMapper(os.path.join(get_buildroot(), self._rel_path))

    if fileset is not None:
      self._add([fileset])
    self.fileset = fileset
Example #20
 def test_pants_binary_dep_isolation_with_multiple_targets(self):
   pex1 = os.path.join(get_buildroot(), 'dist', 'main_with_no_conflict.pex')
   pex2 = os.path.join(get_buildroot(), 'dist', 'main_with_no_pycountry.pex')
   try:
      command = ['binary', '{}:main_with_no_conflict'.format(self.fasthello_install_requires),
                 '{}:main_with_no_pycountry'.format(self.fasthello_install_requires)]
     pants_run = self.run_pants(command=command)
     self.assert_success(pants_run)
     # Check that the pex was built.
     self.assertTrue(os.path.isfile(pex1))
     self.assertTrue(os.path.isfile(pex2))
     # Check that the pex 1 runs.
     output = subprocess.check_output(pex1)
     self._assert_native_greeting(output)
      # Check that pex 2 fails at runtime, since no python_dists should leak into it.
      try:
        output = subprocess.check_output(pex2)
      except subprocess.CalledProcessError as e:
        self.assertNotEqual(0, e.returncode)
      else:
        self.fail('Expected {} to exit non-zero.'.format(pex2))
   finally:
     # Cleanup
     if os.path.exists(pex1):
       os.remove(pex1)
     if os.path.exists(pex2):
       os.remove(pex2)
Example #21
  def _objpath(self, target, source):
    abs_source_root = os.path.join(get_buildroot(), target.target_base)
    abs_source = os.path.join(get_buildroot(), source)
    rel_source = os.path.relpath(abs_source, abs_source_root)
    root, _ = os.path.splitext(rel_source)
    obj_name = root + '.o'

    return os.path.join(self.workdir, target.id, obj_name)
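
A standalone worked example of the path arithmetic above, with invented paths:

import os

buildroot = '/repo'
target_base = 'src/cpp'
source = 'src/cpp/math/vec.cpp'
abs_source_root = os.path.join(buildroot, target_base)
abs_source = os.path.join(buildroot, source)
rel_source = os.path.relpath(abs_source, abs_source_root)  # 'math/vec.cpp'
root, _ = os.path.splitext(rel_source)
print(root + '.o')  # math/vec.o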
Example #22
  def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args,
                        compiler_bridge_classpath_entry, dependency_classpath,
                        scalac_classpath_entries):
    zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot())

    snapshots = [
      self._zinc.snapshot(self.context._scheduler),
      ctx.target.sources_snapshot(self.context._scheduler),
    ]

    relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry]
    directory_digests = tuple(
      entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest
    )
    if len(directory_digests) != len(relevant_classpath_entries):
      for dep in relevant_classpath_entries:
        if dep.directory_digest is None:
          logger.warning(
            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
            "execution".format(dep)
          )

    snapshots.extend(
      classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries
    )

    # TODO: Extract something common from Executor._create_command to make the command line
    # TODO: Lean on distribution for the bin/java appending here
    merged_input_digest = self.context._scheduler.merge_directories(
      tuple(s.directory_digest for s in snapshots) + directory_digests
    )
    argv = ['.jdk/bin/java'] + jvm_options + [
      '-cp', zinc_relpath,
      Zinc.ZINC_COMPILE_MAIN
    ] + zinc_args
    # TODO(#6071): Our ExecuteProcessRequest expects a specific string type for arguments,
    # which py2 doesn't default to. This can be removed when we drop python 2.
    argv = [text_type(arg) for arg in argv]

    req = ExecuteProcessRequest(
      argv=tuple(argv),
      input_files=merged_input_digest,
      output_directories=(classes_dir,),
      description="zinc compile for {}".format(ctx.target.address.spec),
      # TODO: These should always be unicodes
      # Since this is always hermetic, we need to use `underlying_dist`
      jdk_home=text_type(self._zinc.underlying_dist.home),
    )
    res = self.context.execute_process_synchronously_or_raise(
      req, self.name(), [WorkUnitLabel.COMPILER])

    # TODO: Materialize as a batch in do_compile or somewhere
    self.context._scheduler.materialize_directories((
      DirectoryToMaterialize(get_buildroot(), res.output_directory_digest),
    ))

    # TODO: This should probably return a ClasspathEntry rather than a Digest
    return res.output_directory_digest
Example #23
  def known_commits(self):
    with temporary_dir(root_dir=get_buildroot()) as worktree:
      with safe_open(os.path.join(worktree, 'README'), 'w') as fp:
        fp.write('Just a test tree.')

      with initialize_repo(worktree=worktree, gitdir=os.path.join(worktree, '.git')) as git:
        src_file = os.path.join(worktree, 'src/java/org/pantsbuild/Class.java')
        with safe_open(src_file, 'w') as fp:
          fp.write(dedent("""
          package org.pantsbuild;

          class Class {
            static final int MEANING_OF_LIFE = 42;
          }
          """))

        src_build_file = os.path.join(worktree, 'src/java/org/pantsbuild/BUILD')
        with safe_open(src_build_file, 'w') as fp:
          fp.write("java_library(name='pantsbuild', sources=['Class.java'])")

        git.add(src_file, src_build_file)
        git.commit('Introduce Class.')

        test_file = os.path.join(worktree, 'tests/java/org/pantsbuild/ClassTest.java')
        with safe_open(test_file, 'w') as fp:
          fp.write(dedent("""
          package org.pantsbuild;

          import org.junit.Assert;
          import org.junit.Test;

          public class ClassTest {
            @Test public void test() {
              Assert.assertEquals(42, Class.MEANING_OF_LIFE);
            }
          }
          """))

        test_build_file = os.path.join(worktree, 'tests/java/org/pantsbuild/BUILD')
        with safe_open(test_build_file, 'w') as fp:
          fp.write(dedent("""
          jar_library(name='junit', jars=[jar('junit', 'junit', '4.12')])

          junit_tests(
            name='pantsbuild',
            sources=['ClassTest.java'],
            dependencies=[
              ':junit',
              '{}'
            ]
          )
          """).format(os.path.relpath(os.path.dirname(src_build_file), get_buildroot())))

        git.add(test_file, test_build_file)
        git.commit('Introduce ClassTest.')

        yield
Example #24
  def _do_run_tests(self, targets):
    if not targets:
      return PythonTestResult.rc(0)

    buildroot = get_buildroot()
    source_chroot = os.path.relpath(
      self.context.products.get_data(GatherSources.PYTHON_SOURCES).path(), buildroot)
    sources_map = {}  # Path from chroot -> Path from buildroot.
    for t in targets:
      for p in t.sources_relative_to_source_root():
        sources_map[os.path.join(source_chroot, p)] = os.path.join(t.target_base, p)

    if not sources_map:
      return PythonTestResult.rc(0)

    with self._test_runner(targets, sources_map) as (pex, test_args):
      # Validate that the user didn't provide any passthru args that conflict
      # with those we must set ourselves.
      for arg in self.get_passthru_args():
        if arg.startswith('--junitxml') or arg.startswith('--confcutdir'):
          raise TaskError('Cannot pass this arg through to pytest: {}'.format(arg))

      junitxml_path = self._get_junit_xml_path(targets)
      # N.B. the `--confcutdir` here instructs pytest to stop scanning for conftest.py files at the
      # top of the buildroot. This prevents conftest.py files from outside (e.g. in users home dirs)
      # from leaking into pants test runs. See: https://github.com/pantsbuild/pants/issues/2726
      args = ['--junitxml', junitxml_path, '--confcutdir', get_buildroot(),
              '--continue-on-collection-errors']
      if self.get_options().fail_fast:
        args.extend(['-x'])
      if self._debug:
        args.extend(['-s'])
      if self.get_options().colors:
        args.extend(['--color', 'yes'])
      for options in self.get_options().options + self.get_passthru_args():
        args.extend(safe_shlex_split(options))
      args.extend(test_args)
      args.extend(sources_map.keys())

      result = self._do_run_tests_with_args(pex, args)
      external_junit_xml_dir = self.get_options().junit_xml_dir
      if external_junit_xml_dir:
        safe_mkdir(external_junit_xml_dir)
        shutil.copy(junitxml_path, external_junit_xml_dir)
      failed_targets = self._get_failed_targets_from_junitxml(junitxml_path, targets)

      def parse_error_handler(parse_error):
        # Simple error handler to pass to xml parsing function.
        raise TaskError('Error parsing xml file at {}: {}'
          .format(parse_error.xml_path, parse_error.cause))

      all_tests_info = self.parse_test_info(junitxml_path, parse_error_handler, ['file', 'name'])
      for test_name, test_info in all_tests_info.items():
        test_target = self._get_target_from_test(test_info, targets)
        self.report_all_info_for_single_test(self.options_scope, test_target, test_name, test_info)

      return result.with_failed_targets(failed_targets)
Example #25
 def sources_relative_to_source_root(self):
   """
   :API: public
   """
   if self.has_sources():
     abs_source_root = os.path.join(get_buildroot(), self.target_base)
     for source in self.sources_relative_to_buildroot():
       abs_source = os.path.join(get_buildroot(), source)
       yield os.path.relpath(abs_source, abs_source_root)
Example #26
 def write_target(target):
   # We want to operate on the final sources target owns, so we potentially replace it with
   # the target derived from it (by a codegen task).
   subject = self.derived_by_original.get(target, target)
   for rel_source in subject.sources_relative_to_buildroot():
     abs_source_path = os.path.join(get_buildroot(), rel_source)
     abs_source_root_path = os.path.join(get_buildroot(), subject.target_base)
     source_root_relative_path = os.path.relpath(abs_source_path, abs_source_root_path)
     write_target_source(subject, source_root_relative_path)
Example #27
 def write_target(target):
   if isinstance(target, tuple(self.generated_targets.keys())):
     for relpath, abspath in self.iter_generated_sources(target):
       write_codegen_source(relpath, abspath)
   else:
     for rel_source in target.sources_relative_to_buildroot():
       abs_source_path = os.path.join(get_buildroot(), rel_source)
       abs_source_root_path = os.path.join(get_buildroot(), target.target_base)
       source_root_relative_path = os.path.relpath(abs_source_path, abs_source_root_path)
       write_target_source(target, source_root_relative_path)
Example #28
 def write_target(target):
   if isinstance(target, tuple(self.GENERATED_TARGETS.keys())):
     for relpath, abspath in self.iter_generated_sources(target, self._root, self._config):
       write_codegen_source(relpath, abspath)
   else:
     for source in list(target.payload.sources) + list(target.payload.resources):
       abs_source_path = os.path.join(get_buildroot(), target.payload.sources_rel_path, source)
       abs_source_root_path = os.path.join(get_buildroot(), target.target_base)
       source_root_relative_path = os.path.relpath(abs_source_path, abs_source_root_path)
       write_target_source(target, source_root_relative_path)
Example #29
  def test_invalidate_compiles_when_scopes_change(self):
    with temporary_dir(root_dir=get_buildroot()) as workdir_parent:
      workdir = os.path.join(workdir_parent, '.pants.d')
      os.makedirs(workdir)
      with temporary_dir(root_dir=get_buildroot()) as tmp_project:
        with open(os.path.join(tmp_project, 'Foo.java'), 'w') as f:
          f.write('public class Foo {}')
        with open(os.path.join(tmp_project, 'Bar.java'), 'w') as f:
          f.write('public class Bar extends Foo {}')

        def spec(name):
          return '{}:{}'.format(os.path.basename(tmp_project), name)

        def write_build(scope):
          with open(os.path.join(tmp_project, 'BUILD'), 'w') as f:
            f.write(dedent('''
              java_library(name='foo',
                sources=['Foo.java'],
              )
              java_library(name='bar',
                sources=['Bar.java'],
                dependencies=[
                  scoped(scope='{scope}', address=':foo'),
                ],
              )
              jvm_binary(name='bin',
                main='Foo',
                dependencies=[':foo'],
              )
            ''').strip().format(scope=scope))

        write_build('')
        self.assert_success(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'compile', spec('bar'),
        ], workdir=workdir), msg='Normal build from a clean cache failed. Something may be wrong '
                                 'with the test setup.')

        write_build('runtime')
        self.assert_failure(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'compile', spec('bar'),
        ], workdir=workdir), msg='Build from a dirty cache with the dependency on :foo scoped to '
                                 'runtime passed, when it should have had a compile failure. The '
                                 'cache may not have been invalidated.')

        write_build('compile')
        self.assert_success(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'compile', spec('bar'),
        ], workdir=workdir), msg='Build from a dirty cache with the scope changed to compile '
                                 'failed. The cache may not have been invalidated.')

        write_build('compile')
        self.assert_failure(self.run_pants_with_workdir([
          '--no-java-strict-deps', 'run', spec('bin'),
        ], workdir=workdir), msg='Attempt to run binary with the dependency on :foo scoped to '
                                 'compile passed. This should have caused a runtime failure.')
Example #30
  def _create_thrift_project(self):
    with self.temporary_sourcedir() as srcdir:
      with safe_open(os.path.join(srcdir, 'src/thrift/thrifttest/duck.thrift'), 'w') as fp:
        fp.write(dedent("""
            namespace go thrifttest.duck

            struct Duck {
              1: optional string quack,
            }

            service Feeder {
              void feed(1:Duck duck),
            }
            """).strip())
      with safe_open(os.path.join(srcdir, 'src/thrift/thrifttest/BUILD'), 'w') as fp:
        fp.write(dedent("""
            go_thrift_library(
              name='fleem',
              sources=['duck.thrift']
            )
            """).strip())

      with safe_open(os.path.join(srcdir, 'src/go/usethrift/example.go'), 'w') as fp:
        fp.write(dedent("""
            package usethrift

            import "thrifttest/duck"

            func whatevs(f duck.Feeder) string {
              d := duck.NewDuck()
              f.Feed(d)
              return d.GetQuack()
            }
            """).strip())
      with safe_open(os.path.join(srcdir, 'src/go/usethrift/BUILD'), 'w') as fp:
        fp.write(dedent("""
            go_library(
              dependencies=[
                '{srcdir}/src/thrift/thrifttest:fleem'
              ]
            )
            """.format(srcdir=os.path.relpath(srcdir, get_buildroot()))).strip())

      with safe_open(os.path.join(srcdir, '3rdparty/go/github.com/apache/thrift/BUILD'), 'w') as fp:
        fp.write("go_remote_library(rev='0.9.3', pkg='lib/go/thrift')")

      config = {
        'gen.go-thrift': {
          'thrift_import_target':
              os.path.join(os.path.relpath(srcdir, get_buildroot()),
                           '3rdparty/go/github.com/apache/thrift:lib/go/thrift'),
          'thrift_import': 'github.com/apache/thrift/lib/go/thrift'
        }
      }
      yield srcdir, config
Example #31
    def compile(self, args, classpath, sources, classes_output_dir,
                upstream_analysis, analysis_file, log_file, settings):
        # We add compiler_classpath to ensure the scala-library jar is on the classpath.
        # TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't
        # usually need. Be more selective?
        # TODO(John Sirois): Do we need to do this at all?  If adding scala-library to the classpath is
        # only intended to allow target authors to omit a scala-library dependency, then ScalaLibrary
        # already overrides traversable_dependency_specs to achieve the same end; arguably at a more
        # appropriate level and certainly at a more appropriate granularity.
        relativized_classpath = relativize_paths(
            self.compiler_classpath() + classpath, get_buildroot())

        zinc_args = []

        zinc_args.extend([
            '-log-level',
            self.get_options().level, '-analysis-cache', analysis_file,
            '-classpath', ':'.join(relativized_classpath), '-d',
            classes_output_dir
        ])
        if not self.get_options().colors:
            zinc_args.append('-no-color')
        if not self.get_options().name_hashing:
            zinc_args.append('-no-name-hashing')
        if log_file:
            zinc_args.extend(['-capture-log', log_file])

        zinc_args.extend(
            ['-compiler-interface',
             self.tool_jar('compiler-interface')])
        zinc_args.extend(['-sbt-interface', self.tool_jar('sbt-interface')])
        zinc_args.extend(['-scala-path', ':'.join(self.compiler_classpath())])

        zinc_args += self.plugin_args()
        if upstream_analysis:
            zinc_args.extend([
                '-analysis-map', ','.join('{}:{}'.format(*kv)
                                          for kv in upstream_analysis.items())
            ])

        zinc_args += args

        zinc_args.extend([
            '-C-source',
            '-C{}'.format(settings.source_level),
            '-C-target',
            '-C{}'.format(settings.target_level),
        ])
        zinc_args.extend(settings.args)

        jvm_options = list(self._jvm_options)

        zinc_args.extend(sources)

        self.log_zinc_file(analysis_file)
        if self.runjava(classpath=self.zinc_classpath(),
                        main=self._ZINC_MAIN,
                        jvm_options=jvm_options,
                        args=zinc_args,
                        workunit_name='zinc',
                        workunit_labels=[WorkUnitLabel.COMPILER]):
            raise TaskError('Zinc compile failed.')
Example #32
import os

from pants.backend.jvm.ossrh_publication_metadata import (
    Developer,
    License,
    OSSRHPublicationMetadata,
    Scm,
)
from pants.backend.jvm.repository import Repository
from pants.base.build_environment import get_buildroot
from pants.build_graph.build_file_aliases import BuildFileAliases

oss_sonatype_repo = Repository(
    name='oss_sonatype_repo',
    url='https://oss.sonatype.org/#stagingRepositories',
    push_db_basedir=os.path.join(get_buildroot(), 'build-support', 'fsqio',
                                 'pushdb'),
)


def io_fsq_publication_metadata(description):
    return OSSRHPublicationMetadata(
        description=description,
        url='http://github.com/foursquare/fsqio',
        licenses=[
            License(name='Apache License, Version 2.0',
                    url='http://www.apache.org/licenses/LICENSE-2.0')
        ],
        developers=[
            Developer(name='Fsq.io, OSS projects from Foursquare.',
                      url='https://github.com/foursquare/fsqio')
        ],
        scm=Scm.github(user='foursquare', repo='fsqio'),
    )
Example #33
 def __call__(self, *args, **kwargs):
     root = os.path.join(get_buildroot(), self.rel_path)
     return self.wrapped_fn(root=root, *args, **kwargs)
Example #34
    def test_namespace_effective(self):
        self.create_file('src/thrift/com/foo/one.thrift',
                         contents=dedent("""
    namespace py foo.bar

    struct One {}
    """))
        one = self.make_target(spec='src/thrift/com/foo:one',
                               target_type=PythonThriftLibrary,
                               sources=['one.thrift'])
        apache_thrift_gen, synthetic_target_one = self.generate_single_thrift_target(
            one)

        self.create_file('src/thrift2/com/foo/two.thrift',
                         contents=dedent("""
    namespace py foo.baz

    struct Two {}
    """))
        two = self.make_target(spec='src/thrift2/com/foo:two',
                               target_type=PythonThriftLibrary,
                               sources=['two.thrift'])
        _, synthetic_target_two = self.generate_single_thrift_target(two)

        # Confirm separate PYTHONPATH entries, which we need to test namespace packages.
        self.assertNotEqual(synthetic_target_one.target_base,
                            synthetic_target_two.target_base)

        targets = (synthetic_target_one, synthetic_target_two)

        python_repos = global_subsystem_instance(PythonRepos)
        python_setup = global_subsystem_instance(PythonSetup)
        interpreter_cache = PythonInterpreterCache(python_setup, python_repos)
        interpreter = interpreter_cache.select_interpreter_for_targets(targets)

        # We need setuptools to import namespace packages (via pkg_resources), so we prime the
        # PYTHONPATH with interpreter extras, which Pants always populates with setuptools and wheel.
        # TODO(John Sirois): We really should be emitting setuptools in a
        # `synthetic_target_extra_dependencies` override in `ApacheThriftPyGen`:
        #   https://github.com/pantsbuild/pants/issues/5975
        pythonpath = interpreter.extras.values()
        pythonpath.extend(
            os.path.join(get_buildroot(), t.target_base) for t in targets)
        for dist in resolve(
            ['thrift=={}'.format(self.get_thrift_version(apache_thrift_gen))],
                interpreter=interpreter,
                context=python_repos.get_network_context(),
                fetchers=python_repos.get_fetchers()):
            pythonpath.append(dist.location)

        process = subprocess.Popen(
            [interpreter.binary, '-c',
             'from foo.bar.ttypes import One; from foo.baz.ttypes import Two'],
            env={'PYTHONPATH': os.pathsep.join(pythonpath)},
            stderr=subprocess.PIPE)
        _, stderr = process.communicate()
        self.assertEqual(0, process.returncode, stderr)
Example #35
def fast_relpath_collection(collection):
    buildroot = get_buildroot()
    return [fast_relpath_optional(c, buildroot) or c for c in collection]
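
A minimal sketch of the fast_relpath_optional contract assumed here: return the root-relative path when the entry lives under the root, else None. This is an illustrative reimplementation, not the pants helper:

def fast_relpath_optional_sketch(path, root):
  prefix = root.rstrip('/') + '/'
  return path[len(prefix):] if path.startswith(prefix) else None

print(fast_relpath_optional_sketch('/repo/src/a.py', '/repo'))  # src/a.py
print(fast_relpath_optional_sketch('/tmp/b.py', '/repo'))       # None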
Example #36
  def test_stale_artifacts_rmd_when_cache_used_with_zinc(self):
    with temporary_dir() as cache_dir, \
        self.temporary_workdir() as workdir, \
        temporary_dir(root_dir=get_buildroot()) as src_dir:

      config = {
        'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
        'compile.zinc': {'incremental_caching': True },
      }

      srcfile = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'A.java')
      buildfile = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest', 'BUILD')

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""))
      self.create_file(buildfile,
                       dedent("""java_library(name='cachetest',
                                       sources=['A.java']
                          )"""))

      cachetest_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild',
                                    'cachetest:cachetest')

      # Caches values A.class, Main.class
      self.run_compile(cachetest_spec, config, workdir)

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                            class A {}
                            class NotMain {}"""))
      # Caches values A.class, NotMain.class and leaves them on the filesystem
      self.run_compile(cachetest_spec, config, workdir)

      self.create_file(srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          class Main {}"""))

      # Should cause NotMain.class to be removed
      self.run_compile(cachetest_spec, config, workdir)

      root = os.path.join(workdir, 'compile', 'zinc')

      task_versions = [p for p in os.listdir(root) if p != 'current']
      self.assertEqual(len(task_versions), 1, 'Expected 1 task version.')
      versioned_root = os.path.join(root, task_versions[0])

      per_target_dirs = os.listdir(versioned_root)
      self.assertEqual(len(per_target_dirs), 1, 'Expected 1 target.')
      target_workdir_root = os.path.join(versioned_root, per_target_dirs[0])

      target_workdirs = os.listdir(target_workdir_root)
      self.assertEqual(len(target_workdirs), 3, 'Expected 3 workdirs (current, and two versioned).')
      self.assertIn('current', target_workdirs)

      def classfiles(d):
        cd = os.path.join(target_workdir_root, d, 'classes', 'org', 'pantsbuild', 'cachetest')
        return sorted(os.listdir(cd))

      # One workdir should contain NotMain, and the other should contain Main.
      self.assertEqual(sorted(classfiles(w) for w in target_workdirs if w != 'current'),
                        sorted([['A.class', 'Main.class'], ['A.class', 'NotMain.class']]))
Example #37
 def _read_contents(path):
   with open(os.path.join(get_buildroot(), path), 'rb') as fp:
     return fp.read()
Example #38
 def create_analysis_tools(self):
     return AnalysisTools(self.dist.real_home, ZincAnalysisParser(),
                          ZincAnalysis, get_buildroot(),
                          self.get_options().pants_workdir)
Example #39
 def _record_sources_by_target(self, target, sources):
   # Record target -> source mapping for future use.
   with open(os.path.join(self._target_sources_dir, target.identifier), 'w') as outfile:
     for src in sources:
       outfile.write(os.path.join(get_buildroot(), src))
       outfile.write('\n')
Example #40
 def __init__(self, *args, **kwargs):
     super().__init__(*args, **kwargs)
     self._root = get_buildroot()
     self._run = self.get_options().run
     self._recursive = self.get_options().recursive
Example #41
 def pushdb_root(self):
     return os.path.join(get_buildroot(), "testprojects", "ivy", "pushdb")
Example #42
    def setup_legacy_graph_extended(
        pants_ignore_patterns,
        workdir,
        local_store_dir,
        build_file_imports_behavior,
        options_bootstrapper,
        build_configuration,
        build_root=None,
        native=None,
        glob_match_error_behavior=None,
        build_ignore_patterns=None,
        exclude_target_regexps=None,
        subproject_roots=None,
        include_trace_on_error=True,
        execution_options=None,
    ):
        """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param local_store_dir: The directory to use for storing the engine's LMDB store in.
    :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
      import statements. Valid values: "allow", "warn", "error".
    :type build_file_imports_behavior: string
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param options_bootstrapper: A `OptionsBootstrapper` object containing bootstrap options.
    :type options_bootstrapper: :class:`pants.options.options_bootstrapper.OptionsBootstrapper`
    :param build_configuration: The `BuildConfiguration` object to get build file aliases from.
    :type build_configuration: :class:`pants.build_graph.build_configuration.BuildConfiguration`
    :param glob_match_error_behavior: How to behave if a glob specified for a target's sources or
                                      bundles does not expand to anything.
    :type glob_match_error_behavior: :class:`pants.option.global_options.GlobMatchErrorBehavior`
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots
                                  under the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                include the graph trace.
    :param execution_options: Option values for (remote) process execution.
    :type execution_options: :class:`pants.option.global_options.ExecutionOptions`
    :returns: A LegacyGraphScheduler.
    """

        build_root = build_root or get_buildroot()
        build_configuration = build_configuration or BuildConfigInitializer.get(
            options_bootstrapper)
        bootstrap_options = options_bootstrapper.bootstrap_options.for_global_scope()

        build_file_aliases = build_configuration.registered_aliases()
        rules = build_configuration.rules()

        symbol_table = LegacySymbolTable(build_file_aliases)

        project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

        execution_options = execution_options or DEFAULT_EXECUTION_OPTIONS

        # Register "literal" subjects required for these rules.
        parser = LegacyPythonCallbacksParser(symbol_table, build_file_aliases,
                                             build_file_imports_behavior)
        address_mapper = AddressMapper(
            parser=parser,
            build_ignore_patterns=build_ignore_patterns,
            exclude_target_regexps=exclude_target_regexps,
            subproject_roots=subproject_roots)

        # Create a Scheduler containing graph and filesystem rules, with no installed goals. The
        # LegacyBuildGraph will explicitly request the products it needs.
        rules = (
            [
                RootRule(Console),
                SingletonRule.from_instance(glob_match_error_behavior
                                            or GlobMatchErrorBehavior.ignore),
                SingletonRule.from_instance(build_configuration),
                SingletonRule(SymbolTable, symbol_table),
            ] + create_legacy_graph_tasks() + create_fs_rules() +
            create_process_rules() + create_graph_rules(address_mapper) +
            create_options_parsing_rules() + structs_rules() +
            # TODO: This should happen automatically, but most tests (e.g. tests/python/pants_test/auth) fail if it's not here:
            python_test_runner.rules() + rules)

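        # Derive the goal-name -> rule mapping used to locate v2 goals among the
        # registered rules.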
        goal_map = EngineInitializer._make_goal_map_from_rules(rules)

        union_rules = build_configuration.union_rules()

        scheduler = Scheduler(
            native,
            project_tree,
            workdir,
            local_store_dir,
            rules,
            union_rules,
            execution_options,
            include_trace_on_error=include_trace_on_error,
            visualize_to_dir=bootstrap_options.native_engine_visualize_to,
        )

        return LegacyGraphScheduler(scheduler, symbol_table, goal_map)
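A hypothetical call sketch; `options_bootstrapper` and `build_config` are assumed to have been constructed elsewhere (e.g. during options initialization), and the literal paths are placeholders:

    graph_scheduler = EngineInitializer.setup_legacy_graph_extended(
        pants_ignore_patterns=['.git/'],
        workdir='.pants.d',
        local_store_dir='.cache/lmdb_store',
        build_file_imports_behavior='warn',
        options_bootstrapper=options_bootstrapper,
        build_configuration=build_config,
    )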
Example No. 43
 def _file_path(self, path):
     return os.path.join(get_buildroot(),
                         path) if self.get_options().absolute else path
Example No. 44
  def test_transitive_invalid_target_is_dep(self):
    with temporary_dir() as cache_dir, \
      temporary_dir(root_dir=get_buildroot()) as src_dir:

      config = {
        'cache.compile.zinc': {'write_to': [cache_dir], 'read_from': [cache_dir]},
        'compile.zinc': {'incremental_caching': True},
        'java': {'strict_deps': False},
      }
      target_dir = os.path.join(src_dir, 'org', 'pantsbuild', 'cachetest')
      a_srcfile = os.path.join(target_dir, 'A.java')
      b_srcfile = os.path.join(target_dir, 'B.java')
      c_srcfile = os.path.join(target_dir, 'C.java')
      buildfile = os.path.join(target_dir, 'BUILD')

      self.create_file(a_srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class A {}
                          """))
      self.create_file(b_srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class B {
                            A a;
                          }
                          """))
      self.create_file(c_srcfile,
                       dedent("""package org.pantsbuild.cachetest;
                          class C {
                            A a;
                          }
                          """))

      self.create_file(buildfile,
                       dedent("""
                          java_library(name='a',
                                       sources=['A.java']
                          )

                          java_library(name='b',
                                       sources=['B.java'],
                                       dependencies=[':a']
                          )

                          java_library(name='c',
                                       sources=['C.java'],
                                       dependencies=[':b']
                          )
                          """))

      c_spec = os.path.join(os.path.basename(src_dir), 'org', 'pantsbuild',
                            'cachetest:c')

      with self.temporary_workdir() as workdir:
        self.run_compile(c_spec, config, workdir)
      # The workdir is cleaned up on exiting the `with` block above.

      # Remove the cache entries for a and c, leaving b cached.
      cache_dir_entries = os.listdir(os.path.join(cache_dir))
      zinc_dir = os.path.join(cache_dir, cache_dir_entries[0])
      c_or_a_cache_dirs = [subdir for subdir in os.listdir(zinc_dir)
                           if subdir.endswith('cachetest.a') or subdir.endswith('cachetest.c')]
      for subdir in c_or_a_cache_dirs:
        safe_rmtree(os.path.join(zinc_dir, subdir))

      # Recompile; the deleted cache entries force a and c to be rebuilt.
      with self.temporary_workdir() as workdir:
        self.run_compile(c_spec, config, workdir)
Example No. 45
    def register_bootstrap_options(cls, register):
        """Register bootstrap options.

    "Bootstrap options" are a small set of options whose values are useful when registering other
    options. Therefore we must bootstrap them early, before other options are registered, let
    alone parsed.

    Bootstrap option values can be interpolated into the config file, and can be referenced
    programatically in registration code, e.g., as register.bootstrap.pants_workdir.

    Note that regular code can also access these options as normal global-scope options. Their
    status as "bootstrap options" is only pertinent during option registration.
    """
        buildroot = get_buildroot()
        default_distdir_name = 'dist'
        default_distdir = os.path.join(buildroot, default_distdir_name)
        default_rel_distdir = '/{}/'.format(default_distdir_name)
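        # NB: the surrounding slashes let this value be used directly as a
        # gitignore-style pattern in the --build-ignore/--pants-ignore defaults below.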

        register('-l',
                 '--level',
                 choices=['trace', 'debug', 'info', 'warn'],
                 default='info',
                 recursive=True,
                 help='Set the logging level.')
        register(
            '-q',
            '--quiet',
            type=bool,
            recursive=True,
            daemon=False,
            help=
            'Squelches most console output. NOTE: Some tasks default to behaving quietly: '
            'inverting this option supports making them noisier than they would be otherwise.'
        )

        # Not really needed in bootstrap options, but putting it here means it displays right
        # after -l and -q in help output, which is conveniently contextual.
        register('--colors',
                 type=bool,
                 default=sys.stdout.isatty(),
                 recursive=True,
                 daemon=False,
                 help='Set whether log messages are displayed in color.')

        # Pants code uses this only to verify that we are of the requested version. However
        # setup scripts, runner scripts, IDE plugins, etc., may grep this out of pants.ini
        # and use it to select the right version.
        # Note that to print the version of the pants instance you're running, use -v, -V or --version.
        register('--pants-version',
                 advanced=True,
                 default=pants_version(),
                 help='Use this pants version.')

        register('--plugins',
                 advanced=True,
                 type=list,
                 help='Load these plugins.')
        register('--plugin-cache-dir',
                 advanced=True,
                 default=os.path.join(get_pants_cachedir(), 'plugins'),
                 help='Cache resolved plugin requirements here.')

        register(
            '--backend-packages',
            advanced=True,
            type=list,
            default=[
                'pants.backend.graph_info', 'pants.backend.python',
                'pants.backend.jvm', 'pants.backend.native',
                'pants.backend.codegen.antlr.java',
                'pants.backend.codegen.antlr.python',
                'pants.backend.codegen.jaxb',
                'pants.backend.codegen.protobuf.java',
                'pants.backend.codegen.ragel.java',
                'pants.backend.codegen.thrift.java',
                'pants.backend.codegen.thrift.python',
                'pants.backend.codegen.wire.java', 'pants.backend.project_info'
            ],
            help=
            'Load backends from these packages that are already on the path. '
            'Add contrib and custom backends to this list.')

        register('--pants-bootstrapdir',
                 advanced=True,
                 metavar='<dir>',
                 default=get_pants_cachedir(),
                 help='Use this dir for global cache.')
        register('--pants-configdir',
                 advanced=True,
                 metavar='<dir>',
                 default=get_pants_configdir(),
                 help='Use this dir for global config files.')
        register('--pants-workdir',
                 advanced=True,
                 metavar='<dir>',
                 default=os.path.join(buildroot, '.pants.d'),
                 help='Write intermediate output files to this dir.')
        register('--pants-supportdir',
                 advanced=True,
                 metavar='<dir>',
                 default=os.path.join(buildroot, 'build-support'),
                 help='Use support files from this dir.')
        register(
            '--pants-distdir',
            advanced=True,
            metavar='<dir>',
            default=default_distdir,
            help=
            'Write end-product artifacts to this dir. If you modify this path, you '
            'should also update --build-ignore and --pants-ignore to include the '
            'custom dist dir path as well.')
        register(
            '--pants-subprocessdir',
            advanced=True,
            default=os.path.join(buildroot, '.pids'),
            help=
            'The directory to use for tracking subprocess metadata, if any. This should '
            'live outside of the dir used by `--pants-workdir` to allow for tracking '
            'subprocesses that outlive the workdir data (e.g. `./pants server`).'
        )
        register('--pants-config-files',
                 advanced=True,
                 type=list,
                 daemon=False,
                 default=[get_default_pants_config_file()],
                 help='Paths to Pants config files.')
        # TODO: Deprecate the --pantsrc/--pantsrc-files options?  This would require being able
        # to set extra config file locations in an initial bootstrap config file.
        register('--pantsrc',
                 advanced=True,
                 type=bool,
                 default=True,
                 help='Use pantsrc files.')
        register('--pantsrc-files',
                 advanced=True,
                 type=list,
                 metavar='<path>',
                 daemon=False,
                 default=['/etc/pantsrc', '~/.pants.rc'],
                 help='Override config with values from these files. '
                 'Later files override earlier ones.')
        register(
            '--pythonpath',
            advanced=True,
            type=list,
            help='Add these directories to PYTHONPATH to search for plugins.')
        register('--target-spec-file',
                 type=list,
                 dest='target_spec_files',
                 daemon=False,
                 help='Read additional specs from this file, one per line.')
        register(
            '--verify-config',
            type=bool,
            default=True,
            daemon=False,
            advanced=True,
            help=
            'Verify that all config file values correspond to known options.')

        register(
            '--build-ignore',
            advanced=True,
            type=list,
            fromfile=True,
            default=[
                '.*/', default_rel_distdir, 'bower_components/',
                'node_modules/', '*.egg-info/'
            ],
            help='Paths to ignore when identifying BUILD files. '
            'This does not affect any other filesystem operations. '
            'Patterns use the gitignore pattern syntax (https://git-scm.com/docs/gitignore).'
        )
        register(
            '--pants-ignore',
            advanced=True,
            type=list,
            fromfile=True,
            default=['.*/', default_rel_distdir],
            help=
            'Paths to ignore for all filesystem operations performed by pants '
            '(e.g. BUILD file scanning, glob matching, etc). '
            'Patterns use the gitignore syntax (https://git-scm.com/docs/gitignore).'
        )
        register(
            '--glob-expansion-failure',
            type=str,
            choices=GlobMatchErrorBehavior.allowed_values,
            default=GlobMatchErrorBehavior.default_option_value,
            advanced=True,
            help="Raise an exception if any targets declaring source files "
            "fail to match any glob provided in the 'sources' argument.")

        register('--exclude-target-regexp',
                 advanced=True,
                 type=list,
                 default=[],
                 daemon=False,
                 metavar='<regexp>',
                 help='Exclude target roots that match these regexes.')
        register(
            '--subproject-roots',
            type=list,
            advanced=True,
            fromfile=True,
            default=[],
            help=
            'Paths that correspond with build roots for any subproject that this '
            'project depends on.')
        register(
            '--owner-of',
            type=list,
            default=[],
            daemon=False,
            fromfile=True,
            metavar='<path>',
            help='Select the targets that own these files. '
            'This is the third target calculation strategy along with the --changed '
            'options and specifying the targets directly. These three types of target '
            'selection are mutually exclusive.')

        # These logging options are registered in the bootstrap phase so that plugins can log during
        # registration and not so that their values can be interpolated in configs.
        register('-d',
                 '--logdir',
                 advanced=True,
                 metavar='<dir>',
                 help='Write logs to files under this directory.')

        # This facilitates bootstrap-time configuration of pantsd usage such that we can
        # determine whether or not to use the Pailgun client to invoke a given pants run
        # without resorting to heavier options parsing.
        register(
            '--enable-pantsd',
            advanced=True,
            type=bool,
            default=False,
            help=
            'Enables use of the pants daemon (and implicitly, the v2 engine). (Beta)'
        )

        # These facilitate configuring the native engine.
        register(
            '--native-engine-visualize-to',
            advanced=True,
            default=None,
            type=dir_option,
            daemon=False,
            help=
            'A directory to write execution and rule graphs to as `dot` files. The contents '
            'of the directory will be overwritten if any filenames collide.')
        register(
            '--print-exception-stacktrace',
            advanced=True,
            type=bool,
            help=
            'Print to console the full exception stack trace if encountered.')

        # BinaryUtil options.
        register(
            '--binaries-baseurls',
            type=list,
            advanced=True,
            default=['https://binaries.pantsbuild.org'],
            help='List of URLs from which binary tools are downloaded. URLs are '
            'searched in order until the requested path is found.')
        register(
            '--binaries-fetch-timeout-secs',
            type=int,
            default=30,
            advanced=True,
            daemon=False,
            help=
            'Timeout in seconds for URL reads when fetching binary tools from the '
            'repos specified by --binaries-baseurls.')
        register(
            '--binaries-path-by-id',
            type=dict,
            advanced=True,
            help=
            ("Maps output of uname for a machine to a binary search path: "
             "(sysname, id) -> (os, arch), e.g. {('darwin', '15'): ('mac', '10.11'), "
             "('linux', 'arm32'): ('linux', 'arm32')}."))
        register(
            '--allow-external-binary-tool-downloads',
            type=bool,
            default=True,
            advanced=True,
            help=
            "If False, require BinaryTool subclasses to download their contents from urls "
            "generated from --binaries-baseurls, even if the tool has an external url "
            "generator. This can be necessary if using Pants in an environment which cannot "
            "contact the wider Internet.")

        # Pants Daemon options.
        register('--pantsd-pailgun-host',
                 advanced=True,
                 default='127.0.0.1',
                 help='The host to bind the pants nailgun server to.')
        register(
            '--pantsd-pailgun-port',
            advanced=True,
            type=int,
            default=0,
            help=
            'The port to bind the pants nailgun server to. Defaults to a random port.'
        )
        register('--pantsd-log-dir',
                 advanced=True,
                 default=None,
                 help='The directory to log pantsd output to.')
        register(
            '--pantsd-fs-event-workers',
            advanced=True,
            type=int,
            default=4,
            help=
            'The number of workers to use for the filesystem event service executor pool.'
        )
        register(
            '--pantsd-invalidation-globs',
            advanced=True,
            type=list,
            fromfile=True,
            default=[],
            help=
            'Filesystem events matching any of these globs will trigger a daemon restart.'
        )

        # Watchman options.
        register('--watchman-version',
                 advanced=True,
                 default='4.9.0-pants1',
                 help='Watchman version.')
        register(
            '--watchman-supportdir',
            advanced=True,
            default='bin/watchman',
            help=
            'Find watchman binaries under this dir. Used as part of the path to lookup '
            'the binary with --binaries-baseurls and --pants-bootstrapdir.')
        register(
            '--watchman-startup-timeout',
            type=float,
            advanced=True,
            default=30.0,
            help=
            'The watchman socket timeout (in seconds) for the initial `watch-project` command. '
            'This may need to be set higher for larger repos due to watchman startup cost.'
        )
        register('--watchman-socket-timeout',
                 type=float,
                 advanced=True,
                 default=5.0,
                 help='The watchman client socket timeout in seconds.')
        register(
            '--watchman-socket-path',
            type=str,
            advanced=True,
            default=None,
            help=
            'The path to the watchman UNIX socket. This can be overridden if the default '
            'absolute path length exceeds the maximum allowed by the OS.')

        # This option changes the parser behavior in a fundamental way (which currently invalidates
        # all caches), and needs to be parsed out early, so we make it a bootstrap option.
        register('--build-file-imports',
                 choices=['allow', 'warn', 'error'],
                 default='warn',
                 advanced=True,
                 help='Whether to allow import statements in BUILD files.')

        register(
            '--remote-store-server',
            advanced=True,
            help=
            'host:port of grpc server to use as remote execution file store.')
        register(
            '--remote-store-thread-count',
            type=int,
            advanced=True,
            default=DEFAULT_EXECUTION_OPTIONS.remote_store_thread_count,
            help=
            'Thread count to use for the pool that interacts with the remote file store.'
        )
        register(
            '--remote-execution-server',
            advanced=True,
            help=
            'host:port of grpc server to use as remote execution scheduler.')
        register(
            '--remote-store-chunk-bytes',
            type=int,
            advanced=True,
            default=DEFAULT_EXECUTION_OPTIONS.remote_store_chunk_bytes,
            help=
            'Size in bytes of chunks transferred to/from the remote file store.'
        )
        register(
            '--remote-store-chunk-upload-timeout-seconds',
            type=int,
            advanced=True,
            default=DEFAULT_EXECUTION_OPTIONS.
            remote_store_chunk_upload_timeout_seconds,
            help=
            'Timeout (in seconds) for uploads of individual chunks to the remote file store.'
        )

        # This should eventually deprecate the RunTracker worker count, which is used for legacy cache
        # lookups via CacheSetup in TaskBase.
        register(
            '--process-execution-parallelism',
            type=int,
            default=multiprocessing.cpu_count(),
            advanced=True,
            help=
            'Number of concurrent processes that may be executed either locally or remotely.'
        )
        register(
            '--process-execution-cleanup-local-dirs',
            type=bool,
            default=True,
            help=
            'Whether or not to cleanup directories used for local process execution '
            '(primarily useful for e.g. debugging).')
Example No. 46
    def _materialize(self, generation_result):
        remote = self.get_options().remote
        existing_go_buildfiles = set()

        def gather_go_buildfiles(rel_path):
            address_mapper = self.context.address_mapper
            for build_file in address_mapper.scan_build_files(
                    base_path=rel_path):
                existing_go_buildfiles.add(build_file)

        gather_go_buildfiles(generation_result.local_root)
        if remote and generation_result.remote_root != generation_result.local_root:
            gather_go_buildfiles(generation_result.remote_root)

        targets = set(self.context.build_graph.targets(self.is_go))
        if remote and generation_result.remote_root:
            # Generation only walks out from local source, but we might have transitive remote
            # dependencies under the remote root which are not linked except by `resolve.go`.  Add all
            # the remotes we can find to ensure they are re-materialized too.
            remote_root = os.path.join(get_buildroot(),
                                       generation_result.remote_root)
            targets.update(
                self.context.scan(remote_root).targets(self.is_remote_lib))

        failed_results = []
        for result in self.generate_build_files(targets):
            existing_go_buildfiles.discard(result.build_file_path)
            result.log(self.context.log)
            if result.failed:
                failed_results.append(result)

        if existing_go_buildfiles:
            deleted = []
            for existing_go_buildfile in existing_go_buildfiles:
                spec_path = os.path.dirname(existing_go_buildfile)
                for address in self.context.address_mapper.addresses_in_spec_path(
                        spec_path):
                    target = self.context.build_graph.resolve_address(address)
                    if isinstance(target, GoLocalSource):
                        os.unlink(
                            os.path.join(get_buildroot(),
                                         existing_go_buildfile))
                        deleted.append(existing_go_buildfile)
            if deleted:
                self.context.log.info(
                    'Deleted the following obsolete BUILD files:\n\t{}'.format(
                        '\n\t'.join(sorted(deleted))))

        if failed_results:
            self.context.log.error(
                'Un-pinned (FLOATING) Go remote library dependencies are not '
                'allowed in this repository!\n'
                'Found the following FLOATING Go remote libraries:\n\t{}'.
                format('\n\t'.join('{}'.format(result)
                                   for result in failed_results)))
            self.context.log.info(
                'You can fix this by editing the target in each FLOATING BUILD file '
                'listed above to include a `rev` parameter that points to a sha, tag '
                'or commit id that pins the code in the source repository to a fixed, '
                'non-FLOATING version.')
            raise self.FloatingRemoteError(
                'Un-pinned (FLOATING) Go remote libraries detected.')
Example No. 47
    def generate_project(self, project):
        def is_test(source_set):
            # Non test targets that otherwise live in test target roots (say a java_library), must
            # be marked as test for IDEA to correctly link the targets with the test code that uses
            # them. Therefore we check the base instead of the is_test flag.
            return source_set.source_base in SourceSet.TEST_BASES

        def create_content_root(source_set):
            root_relative_path = os.path.join(source_set.source_base, source_set.path) \
                                 if source_set.path else source_set.source_base

            sources = TemplateData(path=root_relative_path,
                                   package_prefix=source_set.path.replace(
                                       '/', '.') if source_set.path else None,
                                   is_test=is_test(source_set))

            return TemplateData(
                path=root_relative_path,
                sources=[sources],
                exclude_paths=[
                    os.path.join(source_set.source_base, x)
                    for x in source_set.excludes
                ],
            )

        content_roots = [
            create_content_root(source_set) for source_set in project.sources
        ]
        if project.has_python:
            content_roots.extend(
                create_content_root(source_set)
                for source_set in project.py_sources)

        scala = None
        if project.has_scala:
            scala = TemplateData(
                language_level=self.scala_language_level,
                maximum_heap_size=self.scala_maximum_heap_size,
                fsc=self.fsc,
                compiler_classpath=project.scala_compiler_classpath)

        configured_module = TemplateData(
            root_dir=get_buildroot(),
            path=self.module_filename,
            content_roots=content_roots,
            bash=self.bash,
            python=project.has_python,
            scala=scala,
            internal_jars=[cp_entry.jar for cp_entry in project.internal_jars],
            internal_source_jars=[
                cp_entry.source_jar for cp_entry in project.internal_jars
                if cp_entry.source_jar
            ],
            external_jars=[cp_entry.jar for cp_entry in project.external_jars],
            external_javadoc_jars=[
                cp_entry.javadoc_jar for cp_entry in project.external_jars
                if cp_entry.javadoc_jar
            ],
            external_source_jars=[
                cp_entry.source_jar for cp_entry in project.external_jars
                if cp_entry.source_jar
            ],
            extra_components=[],
        )

        outdir = os.path.abspath(self.intellij_output_dir)
        if not os.path.exists(outdir):
            os.makedirs(outdir)

        configured_project = TemplateData(
            root_dir=get_buildroot(),
            outdir=outdir,
            modules=[configured_module],
            java=TemplateData(encoding=self.java_encoding,
                              maximum_heap_size=self.java_maximum_heap_size,
                              jdk=self.java_jdk,
                              language_level='JDK_1_%d' %
                              self.java_language_level),
            resource_extensions=list(project.resource_extensions),
            scala=scala,
            checkstyle_suppression_files=','.join(
                project.checkstyle_suppression_files),
            checkstyle_classpath=';'.join(project.checkstyle_classpath),
            debug_port=project.debug_port,
            extra_components=[],
        )

        existing_project_components = None
        existing_module_components = None
        if not self.nomerge:
            # Grab the existing components, which may include customized ones.
            existing_project_components = self._parse_xml_component_elements(
                self.project_filename)
            existing_module_components = self._parse_xml_component_elements(
                self.module_filename)

        # Generate (without merging in any extra components).
        safe_mkdir(os.path.abspath(self.intellij_output_dir))

        ipr = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.project_template),
                      project=configured_project))
        iml = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.module_template),
                      module=configured_module))

        if not self.nomerge:
            # Get the names of the components we generated, and then delete the
            # generated files.  Clunky, but performance is not an issue, and this
            # is an easy way to get those component names from the templates.
            extra_project_components = self._get_components_to_merge(
                existing_project_components, ipr)
            extra_module_components = self._get_components_to_merge(
                existing_module_components, iml)
            os.remove(ipr)
            os.remove(iml)

            # Generate again, with the extra components.
            ipr = self._generate_to_tempfile(
                Generator(pkgutil.get_data(__name__, self.project_template),
                          project=configured_project.extend(
                              extra_components=extra_project_components)))
            iml = self._generate_to_tempfile(
                Generator(pkgutil.get_data(__name__, self.module_template),
                          module=configured_module.extend(
                              extra_components=extra_module_components)))

        shutil.move(ipr, self.project_filename)
        shutil.move(iml, self.module_filename)

        print('\nGenerated project at %s%s' %
              (self.gen_project_workdir, os.sep))

        return self.project_filename if self.open else None
Example No. 48
 def _get_relative_classes_dir_from_target(self, target, compile_contexts):
     cc = self.select_runtime_context(compile_contexts[target])
     return fast_relpath(cc.classes_dir.path, get_buildroot()) + '/**'
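A hypothetical illustration of the glob shape this helper produces:

      # classes_dir = '/repo/.pants.d/compile/rsc/abc123/src.java.lib/current/classes'
      # buildroot   = '/repo'
      # result      = '.pants.d/compile/rsc/abc123/src.java.lib/current/classes/**'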
Example No. 49
    def test_invalidate_compiles_when_scopes_change(self):
        with temporary_dir(root_dir=get_buildroot()) as workdir_parent:
            workdir = os.path.join(workdir_parent, ".pants.d")
            os.makedirs(workdir)
            with temporary_dir(root_dir=get_buildroot()) as tmp_project:
                with open(os.path.join(tmp_project, "Foo.java"), "w") as f:
                    f.write("public class Foo {}")
                with open(os.path.join(tmp_project, "Bar.java"), "w") as f:
                    f.write("public class Bar extends Foo {}")

                def spec(name):
                    return f"{os.path.basename(tmp_project)}:{name}"

                def write_build(scope):
                    with open(os.path.join(tmp_project, "BUILD"), "w") as f:
                        f.write(
                            dedent("""
                                java_library(name='foo',
                                  sources=['Foo.java'],
                                )
                                java_library(name='bar',
                                  sources=['Bar.java'],
                                  dependencies=[
                                    scoped(scope='{scope}', address=':foo'),
                                  ],
                                )
                                jvm_binary(name='bin',
                                  main='Foo',
                                  dependencies=[':foo'],
                                )
                                """).strip().format(scope=scope))

                write_build("")
                self.assert_success(
                    self.run_pants_with_workdir(
                        ["--no-java-strict-deps", "compile",
                         spec("bar")],
                        workdir=workdir),
                    msg=
                    "Normal build from a clean cache failed. Something may be wrong "
                    "with the test setup.",
                )

                write_build("runtime")
                self.assert_failure(
                    self.run_pants_with_workdir(
                        ["--no-java-strict-deps", "compile",
                         spec("bar")],
                        workdir=workdir),
                    msg=
                    "Build from a dirty cache with the dependency on :foo scoped to "
                    "runtime passed, when it should have had a compile failure. The "
                    "cache may not have been invalidated.",
                )

                write_build("compile")
                self.assert_success(
                    self.run_pants_with_workdir(
                        ["--no-java-strict-deps", "compile",
                         spec("bar")],
                        workdir=workdir),
                    msg=
                    "Build from a dirty cache with the scope changed to compile "
                    "failed. The cache may not have been invalidated.",
                )

                write_build("compile")
                self.assert_failure(
                    self.run_pants_with_workdir(
                        ["--no-java-strict-deps", "run",
                         spec("bin")],
                        workdir=workdir),
                    msg=
                    "Attempt to run binary with the dependency on :foo scoped to "
                    "compile passed. This should have caused a runtime failure.",
                )
Example No. 50
        def work_for_vts_rsc(vts, ctx):
            # Double check the cache before beginning compilation
            hit_cache = self.check_cache(vts, counter)
            target = ctx.target
            tgt, = vts.targets

            if not hit_cache:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   ' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                self.context.log.info(
                    counter_str, 'Rsc-ing ',
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')

                # This does the following
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance(
                    ).dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_directory_digests = []
                classpath_product = self.context.products.get_data(
                    'rsc_mixed_compile_classpath')
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if classpath_entry.directory_digest:
                        classpath_directory_digests.append(
                            classpath_entry.directory_digest)
                    else:
                        logger.warning(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of rsc".format(classpath_entry))

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

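                    # Choose a classpath/digest strategy: hermetic execution needs
                    # relative paths plus a merged digest of sources and JDK libs,
                    # while subprocess/nailgun runs can use absolute JDK paths.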
                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest,
                             sources_snapshot.directory_digest) +
                            tuple(classpath_directory_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return (EMPTY_DIRECTORY_DIGEST, classpath_abs_jdk)

                    (input_digest, classpath_entry_paths
                     ) = self.execution_strategy_enum.resolve_for_enum_variant(
                         {
                             self.HERMETIC: hermetic_digest_classpath,
                             self.SUBPROCESS: nonhermetic_digest_classpath,
                             self.NAILGUN: nonhermetic_digest_classpath,
                         })()

                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(classpath_entry_paths),
                        '-d',
                        rsc_jar_file_relative_path,
                    ] + self.get_options().extra_rsc_args + target_sources

                    self.write_argsfile(ctx, args)

                    self._runtool(distribution, input_digest, ctx)

                self._record_target_stats(tgt, len(classpath_entry_paths),
                                          len(target_sources), timer.elapsed,
                                          False, 'rsc')

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
Example No. 51
    def _runtool_hermetic(self, main, tool_name, distribution, input_digest,
                          ctx):
        tool_classpath_abs = self._rsc_classpath
        tool_classpath = fast_relpath_collection(tool_classpath_abs)

        jvm_options = self._jvm_options

        if self._rsc.use_native_image:
            if jvm_options:
                raise ValueError(
                    "`{}` got non-empty jvm_options when running with a graal native-image, but this is "
                    "unsupported. jvm_options received: {}".format(
                        self.options_scope, safe_shlex_join(jvm_options)))
            native_image_path, native_image_snapshot = self._rsc.native_image(
                self.context)
            additional_snapshots = [native_image_snapshot]
            initial_args = [native_image_path]
        else:
            additional_snapshots = []
            initial_args = [
                distribution.java,
            ] + self.get_options().jvm_options + [
                '-cp',
                os.pathsep.join(tool_classpath),
                main,
            ]

        argfile_snapshot, = self.context._scheduler.capture_snapshots([
            PathGlobsAndRoot(
                PathGlobs([fast_relpath(ctx.args_file, get_buildroot())]),
                get_buildroot(),
            ),
        ])

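        # JVM-style tools accept '@<file>' to read options from an argfile; using
        # the snapshotted argsfile keeps the hermetic command line short.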
        cmd = initial_args + ['@{}'.format(argfile_snapshot.files[0])]

        pathglobs = list(tool_classpath)

        # Don't capture a snapshot if pathglobs is empty: leave the digest unset
        # so it is skipped when merging the input directories below.
        path_globs_input_digest = None
        if pathglobs:
            root = PathGlobsAndRoot(PathGlobs(tuple(pathglobs)),
                                    get_buildroot())
            path_globs_input_digest = self.context._scheduler.capture_snapshots(
                (root, ))[0].directory_digest

        epr_input_files = self.context._scheduler.merge_directories(
            ((path_globs_input_digest, ) if path_globs_input_digest else ()) +
            ((input_digest, ) if input_digest else ()) +
            tuple(s.directory_digest for s in additional_snapshots) +
            (argfile_snapshot.directory_digest, ))

        epr = ExecuteProcessRequest(
            argv=tuple(cmd),
            input_files=epr_input_files,
            output_files=(fast_relpath(ctx.rsc_jar_file.path,
                                       get_buildroot()), ),
            output_directories=tuple(),
            timeout_seconds=15 * 60,
            description='run {} for {}'.format(tool_name, ctx.target),
            # TODO: These should always be unicodes
            # Since this is always hermetic, we need to use `underlying.home` because
            # ExecuteProcessRequest requires an existing, local jdk location.
            jdk_home=distribution.underlying_home,
        )
        res = self.context.execute_process_synchronously_without_raising(
            epr, self.name(), [WorkUnitLabel.COMPILER])

        if res.exit_code != 0:
            raise TaskError(res.stderr, exit_code=res.exit_code)

        # TODO: parse the output of -Xprint:timings for rsc and write it to self._record_target_stats()!

        res.output_directory_digest.dump(ctx.rsc_jar_file.path)

        ctx.rsc_jar_file = ClasspathEntry(ctx.rsc_jar_file.path,
                                          res.output_directory_digest)

        self.context._scheduler.materialize_directories((
            DirectoryToMaterialize(
                # NB: the first element here is the root to materialize into, not the dir to snapshot.
                get_buildroot(),
                res.output_directory_digest), ))

        return res
Example No. 52
 def __call__(self):
     return os.path.join(get_buildroot(), self.rel_path)
Example No. 53
 def compute_target_dir(self, target):
   # Sources are all relative to their roots: relativize directories as well to avoid
   # breaking filesystem path length limits.
   return relativize_path(os.path.join(self.workdir, target.id), get_buildroot())
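A hypothetical before/after illustration of the relativization:

   # workdir='/repo/.pants.d/foo', target.id='src.java.lib', buildroot='/repo'
   # -> '.pants.d/foo/src.java.lib'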
Example No. 54
  def temporary_sourcedir(self):
     return temporary_dir(root_dir=get_buildroot())
Example No. 55
    def generate_project(self, project):
        def create_content_root(source_set):
            root_relative_path = os.path.join(source_set.source_base, source_set.path) \
                                 if source_set.path else source_set.source_base
            if source_set.resources_only:
                if source_set.is_test:
                    content_type = 'java-test-resource'
                else:
                    content_type = 'java-resource'
            else:
                content_type = ''

            sources = TemplateData(path=root_relative_path,
                                   package_prefix=source_set.path.replace(
                                       '/', '.') if source_set.path else None,
                                   is_test=source_set.is_test,
                                   content_type=content_type)

            return TemplateData(
                path=root_relative_path,
                sources=[sources],
                exclude_paths=[
                    os.path.join(source_set.source_base, x)
                    for x in source_set.excludes
                ],
            )

        content_roots = [
            create_content_root(source_set) for source_set in project.sources
        ]
        if project.has_python:
            content_roots.extend(
                create_content_root(source_set)
                for source_set in project.py_sources)

        scala = None
        if project.has_scala:
            scala = TemplateData(
                language_level=self.scala_language_level,
                maximum_heap_size=self.scala_maximum_heap_size,
                fsc=self.fsc,
                compiler_classpath=project.scala_compiler_classpath)

        exclude_folders = []
        if self.get_options().exclude_maven_target:
            exclude_folders += IdeaGen._maven_targets_excludes(get_buildroot())

        exclude_folders += self.get_options().exclude_folders

        java_language_level = None
        for target in project.targets:
            if isinstance(target, JvmTarget):
                if java_language_level is None or java_language_level < target.platform.source_level:
                    java_language_level = target.platform.source_level
        if java_language_level is not None:
            java_language_level = 'JDK_{0}_{1}'.format(
                *java_language_level.components[:2])

        configured_module = TemplateData(
            root_dir=get_buildroot(),
            path=self.module_filename,
            content_roots=content_roots,
            bash=self.bash,
            python=project.has_python,
            scala=scala,
            internal_jars=[cp_entry.jar for cp_entry in project.internal_jars],
            internal_source_jars=[
                cp_entry.source_jar for cp_entry in project.internal_jars
                if cp_entry.source_jar
            ],
            external_jars=[cp_entry.jar for cp_entry in project.external_jars],
            external_javadoc_jars=[
                cp_entry.javadoc_jar for cp_entry in project.external_jars
                if cp_entry.javadoc_jar
            ],
            external_source_jars=[
                cp_entry.source_jar for cp_entry in project.external_jars
                if cp_entry.source_jar
            ],
            annotation_processing=self.annotation_processing_template,
            extra_components=[],
            exclude_folders=exclude_folders,
            java_language_level=java_language_level,
        )

        outdir = os.path.abspath(self.intellij_output_dir)
        if not os.path.exists(outdir):
            os.makedirs(outdir)

        scm = get_scm()
        configured_project = TemplateData(
            root_dir=get_buildroot(),
            outdir=outdir,
            git_root=scm.worktree,
            modules=[configured_module],
            java=TemplateData(encoding=self.java_encoding,
                              maximum_heap_size=self.java_maximum_heap_size,
                              jdk=self.java_jdk,
                              language_level='JDK_1_{}'.format(
                                  self.java_language_level)),
            resource_extensions=list(project.resource_extensions),
            scala=scala,
            checkstyle_classpath=';'.join(project.checkstyle_classpath),
            debug_port=project.debug_port,
            annotation_processing=self.annotation_processing_template,
            extra_components=[],
        )

        existing_project_components = None
        existing_module_components = None
        if not self.nomerge:
            # Grab the existing components, which may include customized ones.
            existing_project_components = self._parse_xml_component_elements(
                self.project_filename)
            existing_module_components = self._parse_xml_component_elements(
                self.module_filename)

        # Generate (without merging in any extra components).
        safe_mkdir(os.path.abspath(self.intellij_output_dir))

        ipr = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.project_template),
                      project=configured_project))
        iml = self._generate_to_tempfile(
            Generator(pkgutil.get_data(__name__, self.module_template),
                      module=configured_module))

        if not self.nomerge:
            # Get the names of the components we generated, and then delete the
            # generated files.  Clunky, but performance is not an issue, and this
            # is an easy way to get those component names from the templates.
            extra_project_components = self._get_components_to_merge(
                existing_project_components, ipr)
            extra_module_components = self._get_components_to_merge(
                existing_module_components, iml)
            os.remove(ipr)
            os.remove(iml)

            # Generate again, with the extra components.
            ipr = self._generate_to_tempfile(
                Generator(pkgutil.get_data(__name__, self.project_template),
                          project=configured_project.extend(
                              extra_components=extra_project_components)))
            iml = self._generate_to_tempfile(
                Generator(pkgutil.get_data(__name__, self.module_template),
                          module=configured_module.extend(
                              extra_components=extra_module_components)))

        self.context.log.info(
            'Generated IntelliJ project in {directory}'.format(
                directory=self.gen_project_workdir))

        shutil.move(ipr, self.project_filename)
        shutil.move(iml, self.module_filename)
        return self.project_filename if self.open else None
Example No. 56
 def __call__(self):
     """
     :returns: The absolute path of this BUILD file.
     """
     return os.path.join(get_buildroot(), self._parse_context.rel_path)
Example No. 57
class TargetBagMixin(object):
    """Task Mixin that will scan configured paths and inject discovered targets into the build_graph."""

    # Override these at the Task level as needed; these defaults are pretty bad.
    SYNTHETIC_TARGET_PATH = get_buildroot()

    # Implementing classes that need overrides are offered a few basic levers:
    #  - inject_synthetic_target - allows configuring the synthetic target bag.
    #  - maybe_add_dependency_edges - can add further dependency edges as needed.
    #  - new_target_roots - Can choose to expand the original target_roots or redefine altogether.

    @classmethod
    def prepare(cls, options, round_manager):
        super(TargetBagMixin, cls).prepare(options, round_manager)

    # These options are not included in any fingerprint because all that matters is the relationship between targets.
    @classmethod
    def register_options(cls, register):
        super(TargetBagMixin, cls).register_options(register)
        register(
            '--spec-paths',
            type=list,
            default=[],
            advanced=True,
            help=
            'Gather target addresses found in this directory, non-recursively.'
        )
        register(
            '--spec-roots',
            type=list,
            advanced=True,
            default=['3rdparty'],
            help=
            'Gather target addresses found when recursively scanning this path.'
        )
        # I could not find a sensible way to include the buildroot in the other options, so fell back to a yes/no.
        register(
            '--include-buildroot',
            type=bool,
            advanced=True,
            default=False,
            help='Include targets defined in the buildroot, non-recursive.')

    @classmethod
    def gathered_target_type_aliases(cls):
        """Return a tuple of target type_alias to collect.

    A tuple of target type_alias string that indicate the addressable to scan and collect from the BUILD files
    e.g. ("jar_library", "python_library").
    :rtype tuple:
    """
        raise NotImplementedError

    @classmethod
    def injected_target_name(cls):
        """Return a unique string that will be used to identify the created synthetic bag target."""
        raise NotImplementedError
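    # A hypothetical minimal implementer of this mixin would override the two
    # classmethods above, e.g.:
    #
    #   class JarBagTask(TargetBagMixin, Task):
    #       @classmethod
    #       def gathered_target_type_aliases(cls):
    #           return ('jar_library',)
    #
    #       @classmethod
    #       def injected_target_name(cls):
    #           return 'synthetic-jar-bag'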

    @classmethod
    def get_synthetic_address(cls):
        # TODO(mateo): Add an error catch in case the address is already in the graph (meaning two implementing tasks),
        # which means one or both must also override the constants used for the addressing.
        return Address(spec_path='', target_name=cls.injected_target_name())

    @classmethod
    def inject_synthetic_target(cls, build_graph, synthetic_address, *args,
                                **kwargs):
        """Create a synthetic target that depends on the set of jar_library_targets.

    The created target is injected into the build graph as an unconnected target with a payload of
    a JarsField populated by the JarDependencies implied by the jar_library_targets.
    The synthetic target's address can be passed as an argument or set as an override with cls.get_synthetic_address.

    :param `pants.build_graph.BuildGraph` build_graph: Populated build_graph instance
    :param `pants.build_graph.Address` synthetic_address: Address to be used by the injected target.
    :param *args: Any args required by the synthetic target type.
    :param **kwargs: Keywords required by the synthetic target type.
    :returns target:
    :rtype subclass of `pants.target.Target`:
    """
        build_graph.inject_synthetic_target(synthetic_address, Target, *args,
                                            **kwargs)
        return build_graph.get_target(synthetic_address)

    @classmethod
    def maybe_add_dependency_edges(cls, build_graph, original_targets,
                                   synthetic_target):
        """Opportunity to connect the synthetic_target to any target that was originally in the build graph.

    This passes the targets found in the build_graph before any synthetic targets were created, to try and
    limit the opportunities for accidental build graph cycles.

    :param `pants.build_graph.BuildGraph` build_graph: Populated build_graph instance.
    :param collection[`pants.target.Target`]: Set of targets originally found in build_graph.
    :param subclass of `pants.target.Target` target: The synthetic target created by the task.

    :param **kwargs: Keywords required by the synthetic target type.
    :returns target:
    :rtype subclass of `pants.target.Target`:
    """
        # Default is to opt-out and keep the target as an unconnected component, which is neither helpful nor disruptive.
        pass

    @classmethod
    def new_target_roots(cls, build_graph, synthetic_target):
        """Return a list of targets that will become the new target_roots for the entire Pants run.

    :param `pants.build_graph.BuildGraph` build_graph: Populated build_graph instance.
    :param subclass of `pants.target.Target` target: The synthetic target created by the task.
    :returns List of targets that will replace the original target_roots or None to leave them unchanged.
    :rtype list or None:
    """
        # Default of None means the mixin does not redefine the target_roots, but just inject new targets into the graph.
        # The option is surfaced in case it is needed, but AFAICT the best solution is to just pull the target bag into
        # context by injecting it as a dependency through the cls.maybe_add_dependency_edges.
        #
        # One other thing to be aware of is that today (Pants 1.4.0 and earlier) any given invocation of Pants is allowed to
        # propose a single alternate_target_root. There are some upstream tasks that do so (in `changed` goal, primarily)
        # so returning anything more than None here will make the implementing class incompatible with those and any other
        # tasks that propose new target_roots. FYI.
        return None

    @classmethod
    def add_payload_fields(cls, build_graph, addresses, payload):
        """Add fields to a payload

    :param `pants.build_graph.BuildGraph` build_graph: Populated build_graph instance
    :param collection[`pants.build_graph.Addresses`] addresses: Collection of addresses.
      not in the context of any target_root.
    :param `pants.base.Payload` payload: Payload to be updated.
    :returns payload:
    :rtype pants.base.Payload:
    """
        return payload
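
    # A hedged example override (hypothetical, JVM-flavored; not part of the mixin itself):
    # an implementer gathering jar_library targets could aggregate their JarDependencies
    # into the payload here, e.g.:
    #
    #   @classmethod
    #   def add_payload_fields(cls, build_graph, addresses, payload):
    #       jars = [jar for address in addresses
    #               for jar in build_graph.get_target(address).jar_dependencies]
    #       payload.add_field('jars', JarsField(jars))
    #       return payload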

    @classmethod
    def create_synthetic_target(cls, options, address_mapper, build_graph,
                                matching_addresses):
        """Create a synthetic target that depends on the set of jar_library_targets.

    The created target is injected into the build graph as an unconnected target with a payload of
    a JarsField populated by the JarDependencies implied by the jar_library_targets.

    :param `pants.option.options.Option` options: The Task's scoped options.
    :param `pants.build_graph.AddressMapper` address_mapper: Populated build_graph instance.
    :param `pants.build_graph.BuildGraph` build_graph: Populated build_graph instance
    :param collection[`pants.target.Target`] discovered_targets: Targets newly injected into build graph but possibly
      not in the context of any target_root.
    :returns synthetic target:
    :rtype subclass of `pants.target.Target`:
    """

        payload = cls.add_payload_fields(build_graph, matching_addresses,
                                         Payload())
        synthetic_target = cls.inject_synthetic_target(
            build_graph,
            cls.get_synthetic_address(),
            payload=payload,
            dependencies=matching_addresses,
        )
        return synthetic_target

    @classmethod
    def alternate_target_roots(cls, options, address_mapper, build_graph):
        original_targets = build_graph.targets()

        # The address macros return OrderedSets, but ordering is not preserved here. I believe
        # that to be fine, but FYI.
        buildroot = {SiblingAddresses('')} if options.get('include_buildroot') else set()
        paths_checked = {SiblingAddresses(t) for t in options.get('spec_paths')}
        dirs_checked = {DescendantAddresses(t) for t in options.get('spec_roots')}
        spec_sets = buildroot | paths_checked | dirs_checked

        all_found_addresses = address_mapper.scan_specs(spec_sets)
        matching_addresses = set()

        for address in all_found_addresses:
            # TODO(mateo): This used to be able to get this info without hydrating the target.
            target = build_graph.resolve_address(address)
            if target.type_alias in tuple(cls.gathered_target_type_aliases()):
                matching_addresses.add(address)
                build_graph.inject_address_closure(address)
        synthetic_target = cls.create_synthetic_target(options, address_mapper,
                                                       build_graph,
                                                       matching_addresses)

        # TODO(mateo): Should probably allow implementers that do not need to create a new target
        # to return None here.
        if not isinstance(synthetic_target, Target):
            raise ValueError(
                "{}.create_synthetic_target() needs to return a Target subclass"
                .format(cls))
        cls.maybe_add_dependency_edges(build_graph, original_targets,
                                       synthetic_target)
        return cls.new_target_roots(build_graph, synthetic_target)
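
# A minimal sketch of a concrete implementer (all names hypothetical; the mixin defined
# above is referred to as `TargetBagMixin` here purely for illustration):
class JarBagExample(TargetBagMixin):

    @classmethod
    def injected_target_name(cls):
        # Unique name for the synthetic bag target injected into the graph.
        return 'all-gathered-jar-libraries'

    @classmethod
    def gathered_target_type_aliases(cls):
        # Only targets whose BUILD-file alias appears here are swept into the bag
        # (consumed by alternate_target_roots above).
        return ('jar_library',)

    @classmethod
    def maybe_add_dependency_edges(cls, build_graph, original_targets, synthetic_target):
        # Opt in to connecting the bag: a naive sketch that makes each original target depend
        # on it. A real implementer must guard against cycles, per the docstring above.
        bag_members = set(synthetic_target.dependencies)
        for target in original_targets:
            if target not in bag_members:
                build_graph.inject_dependency(dependent=target.address,
                                              dependency=synthetic_target.address)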
Exemplo n.º 58
0
def relative_to_exec_root(path):
    # TODO: Support workdirs not nested under buildroot by path-rewriting.
    return fast_relpath(path, get_buildroot())
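
# A minimal standalone sketch of what fast_relpath does (hypothetical re-implementation,
# not the pants helper itself): a strict prefix-based relpath that, unlike os.path.relpath,
# never emits '..' segments and instead fails loudly for paths outside the base.
def fast_relpath_sketch(path, base):
    base = base.rstrip('/')
    if path == base:
        return ''
    prefix = base + '/'
    if not path.startswith(prefix):
        raise ValueError('{} is not a subpath of {}'.format(path, base))
    return path[len(prefix):]

# e.g. fast_relpath_sketch('/repo/.pants.d/gen', '/repo') -> '.pants.d/gen'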
Exemplo n.º 59
0
    def generate_targets(self, local_go_targets=None):
        """Generate Go targets in memory to form a complete Go graph.

    :param local_go_targets: The local Go targets to fill in a complete target graph for.  If
                             `None`, then all local Go targets under the Go source root are used.
    :type local_go_targets: :class:`collections.Iterable` of
                            :class:`pants.contrib.go.targets.go_local_source import GoLocalSource`
    :returns: A generation result if targets were generated, else `None`.
    :rtype: :class:`GoBuildgen.GenerationResult`
    """
        # TODO(John Sirois): Support multiple source roots like GOPATH does?
        # The GOPATH's 1st element is read-write, the rest are read-only; i.e., their sources build
        # to the 1st element's pkg/ and bin/ dirs.

        go_roots_by_category = defaultdict(list)
        # TODO: Add "find source roots for lang" functionality to SourceRoots and use that instead.
        for sr in self.context.source_roots.all_roots():
            if 'go' in sr.langs:
                go_roots_by_category[sr.category].append(sr.path)

        if go_roots_by_category[SourceRootCategories.TEST]:
            raise self.InvalidLocalRootsError(
                'Go buildgen does not support test source roots.')
        if go_roots_by_category[SourceRootCategories.UNKNOWN]:
            raise self.InvalidLocalRootsError(
                'Go buildgen does not support source roots of '
                'unknown category.')

        local_roots = go_roots_by_category[SourceRootCategories.SOURCE]
        if not local_roots:
            raise self.NoLocalRootsError(
                'Can only BUILD gen if a Go local sources source root is '
                'defined.')
        if len(local_roots) > 1:
            raise self.InvalidLocalRootsError(
                'Can only BUILD gen for a single Go local sources source '
                'root, found:\n\t{}'.format('\n\t'.join(sorted(local_roots))))
        local_root = local_roots.pop()

        if local_go_targets:
            unrooted_locals = {
                t
                for t in local_go_targets if t.target_base != local_root
            }
            if unrooted_locals:
                raise self.UnrootedLocalSourceError(
                    'Cannot BUILD gen until the following targets are '
                    'relocated to the source root at {}:\n\t{}'.format(
                        local_root, '\n\t'.join(
                            sorted(t.address.reference()
                                   for t in unrooted_locals))))
        else:
            root = os.path.join(get_buildroot(), local_root)
            local_go_targets = self.context.scan(root=root).targets(
                self.is_local_src)
            if not local_go_targets:
                return None

        remote_roots = go_roots_by_category[SourceRootCategories.THIRDPARTY]
        if len(remote_roots) > 1:
            raise self.InvalidRemoteRootsError(
                'Can only BUILD gen for a single Go remote library source '
                'root, found:\n\t{}'.format('\n\t'.join(sorted(remote_roots))))
        remote_root = remote_roots.pop() if remote_roots else None

        generator = GoTargetGenerator(
            self.import_oracle,
            self.context.build_graph,
            local_root,
            self.get_fetcher_factory(),
            generate_remotes=self.get_options().remote,
            remote_root=remote_root)
        with self.context.new_workunit('go.buildgen',
                                       labels=[WorkUnitLabel.MULTITOOL]):
            try:
                generated = generator.generate(local_go_targets)
                return self.GenerationResult(generated=generated,
                                             local_root=local_root,
                                             remote_root=remote_root)
            except generator.GenerationError as e:
                raise self.GenerationError(e)
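
    # A hedged usage sketch (hypothetical helper method, not part of the original task; field
    # names taken from the GenerationResult construction above):
    def report_generation(self, local_go_targets=None):
        result = self.generate_targets(local_go_targets=local_go_targets)
        if result is None:
            self.context.log.info('No local Go targets found; nothing was generated.')
            return None
        self.context.log.info('BUILD gen used local root {} (remote root: {}).'.format(
            result.local_root, result.remote_root))
        return result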
Exemplo n.º 60
0
    def run_pants_with_workdir(self,
                               command,
                               workdir,
                               config=None,
                               stdin_data=None,
                               extra_env=None,
                               build_root=None,
                               **kwargs):

        args = [
            '--no-pantsrc',
            '--pants-workdir={}'.format(workdir),
            '--kill-nailguns',
            '--print-exception-stacktrace',
        ]

        if self.hermetic():
            args.extend([
                '--pants-config-files=[]',
                # Turn off cache globally.  A hermetic integration test shouldn't rely on cache,
                # or we have no idea if it's actually testing anything.
                '--no-cache-read',
                '--no-cache-write',
                # Turn cache on just for tool bootstrapping, for performance.
                '--cache-bootstrap-read',
                '--cache-bootstrap-write'
            ])

        if config:
            config_data = config.copy()
            ini = ConfigParser.ConfigParser(
                defaults=config_data.pop('DEFAULT', None))
            for section, section_config in config_data.items():
                ini.add_section(section)
                for key, value in section_config.items():
                    ini.set(section, key, value)
            ini_file_name = os.path.join(workdir, 'pants.ini')
            with safe_open(ini_file_name, mode='w') as fp:
                ini.write(fp)
            args.append('--config-override=' + ini_file_name)

        pants_script = os.path.join(build_root or get_buildroot(),
                                    self.PANTS_SCRIPT_NAME)
        pants_command = [pants_script] + args + command

        # Only whitelisted entries will be included in the environment if self.hermetic() is True.
        if self.hermetic():
            env = dict()
            for h in self.hermetic_env_whitelist():
                env[h] = os.getenv(h)
            hermetic_env = os.getenv('HERMETIC_ENV')
            if hermetic_env:
                for h in hermetic_env.strip(',').split(','):
                    env[h] = os.getenv(h)
        else:
            env = os.environ.copy()
        if extra_env:
            env.update(extra_env)

        proc = subprocess.Popen(pants_command,
                                env=env,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                **kwargs)
        (stdout_data, stderr_data) = proc.communicate(stdin_data)

        return PantsResult(pants_command, proc.returncode,
                           stdout_data.decode("utf-8"),
                           stderr_data.decode("utf-8"), workdir)