def test_locate(self):
  """ParseContext.locate() fails outside a parse and tracks the innermost active context."""
  # Outside of any active parse there is no context to locate.
  with pytest.raises(ParseContext.ContextError):
    ParseContext.locate()
  with temporary_dir() as root_dir:
    a_context = ParseContext(create_buildfile(root_dir, 'a'))
    b_context = ParseContext(create_buildfile(root_dir, 'b'))
    def test_in_a():
      # While executing inside a_context, locate() must return it ...
      self.assertEquals(a_context, ParseContext.locate())
      # ... and nesting b_context makes locate() track the innermost context.
      return b_context.do_in_context(lambda: ParseContext.locate())
    self.assertEquals(b_context, a_context.do_in_context(test_in_a))
def PythonEgg(glob, name=None):
  """Refers to pre-built Python eggs in the file system.

  (To instead fetch eggs in a ``pip``/``easy_install`` way, use
  ``python_requirement``)

  E.g., ``egg(name='foo', glob='foo-0.1-py2.6.egg')`` would pick up the file
  ``foo-0.1-py2.6.egg`` from the ``BUILD`` file's directory; targets could
  depend on it by name ``foo``.

  :param string glob: File glob pattern.
  :param string name: Target name; by default uses the egg's project name.
  :raises ValueError: If the glob matches no eggs, or matches eggs with multiple versions.
  """
  # TODO(John Sirois): Rationalize with globs handling in ParseContext
  eggs = fsglob(ParseContext.path(glob))

  # Guard the empty match explicitly: falling through would raise an opaque
  # KeyError from `requirements.pop()` below.
  if not eggs:
    raise ValueError('No eggs matched the glob %r' % glob)

  requirements = set()
  for egg in eggs:
    if os.path.isdir(egg):
      metadata = PathMetadata(egg, os.path.join(egg, 'EGG-INFO'))
    else:
      metadata = EggMetadata(zipimporter(egg))
    dist = Distribution.from_filename(egg, metadata=metadata)
    requirements.add(dist.as_requirement())

  if len(requirements) > 1:
    raise ValueError('Got multiple egg versions! => %s' % requirements)

  return PythonRequirement(str(requirements.pop()), name=name)
def test_validation(self):
  """InternalTarget requires a string name and target-typed dependencies."""
  with ParseContext.temp("InternalTargetTest/test_validation"):
    # A well-formed target with no dependencies is accepted.
    InternalTarget(name="valid", dependencies=None)
    # A non-string name must be rejected.
    self.assertRaises(TargetDefinitionException, InternalTarget,
                      name=1, dependencies=None)
    # A real Target dependency is accepted.
    InternalTarget(name="valid2", dependencies=Target(name="mybird"))
    # A non-target dependency must be rejected.
    self.assertRaises(TargetDefinitionException, InternalTarget,
                      name="valid3", dependencies=1)
def test_binary_target_injected_into_minified_dependencies(self):
  """A binary referenced via with_binaries() is pulled into minified_dependencies."""
  with ParseContext.temp():
    # foo provides a setup_py that maps the 'foo_binary' entry point to the :foo_bin target.
    foo = python_library(
      name = 'foo',
      provides = setup_py(
        name = 'foo',
        version = '0.0.0',
      ).with_binaries(
        foo_binary = pants(':foo_bin')
      )
    )
    foo_bin = python_binary(
      name = 'foo_bin',
      entry_point = 'foo.bin.foo',
      dependencies = [
        pants(':foo_bin_dep')
      ]
    )
    foo_bin_dep = python_library(
      name = 'foo_bin_dep'
    )
    # Both the binary and its transitive dep must be injected.
    assert SetupPy.minified_dependencies(foo) == OrderedSet([foo_bin, foo_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(foo))
    assert entry_points == {'foo_binary': 'foo.bin.foo'}
    # Whether or not we run recursively, only foo itself gets a setup.py run
    # (foo_bin_dep has no provides, so there is nothing to recurse into).
    with self.run_execute(foo, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)
    with self.run_execute(foo, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)
def per_path_symbol_factory(path, global_symbols):
  """Creates the symbols for parsing one BUILD file at `path`.

  :param path: Path of the BUILD file being parsed.
  :param global_symbols: Base symbol dict to extend with per-path entries.
  :returns: A dict of symbols to expose to the BUILD file.
  """
  per_path_symbols = {}

  symbols = global_symbols.copy()
  for alias, target_macro_factory in aliases.target_macro_factories.items():
    for target_type in target_macro_factory.target_types:
      # BUGFIX: bind `alias` eagerly via a default argument. The original bare
      # closure late-bound `alias`, so every target_type entry dispatched to the
      # LAST alias produced by this loop rather than its own.
      symbols[target_type] = (
        lambda *args, _alias=alias, **kwargs: per_path_symbols[_alias](*args, **kwargs))

  parse_context = ParseContext(rel_path=os.path.relpath(os.path.dirname(path), build_root),
                               type_aliases=symbols)

  for alias, object_factory in aliases.context_aware_object_factories.items():
    per_path_symbols[alias] = object_factory(parse_context)

  for alias, target_macro_factory in aliases.target_macro_factories.items():
    target_macro = target_macro_factory.target_macro(parse_context)
    per_path_symbols[alias] = target_macro
    for target_type in target_macro_factory.target_types:
      per_path_symbols[target_type] = target_macro

  return per_path_symbols
def _get_symbols(cls, symbol_table_cls):
  """Computes the BUILD file symbols and the shared mutable ParseContext.

  :param symbol_table_cls: The SymbolTable class providing table() and aliases().
  :returns: A (symbols dict, ParseContext) tuple.
  """
  symbol_table = symbol_table_cls.table()
  # TODO: nasty escape hatch
  aliases = symbol_table_cls.aliases()

  class Registrar(BuildFileTargetFactory):
    def __init__(self, type_alias, object_type):
      self._type_alias = type_alias
      self._object_type = object_type
      self._serializable = Serializable.is_serializable_type(self._object_type)

    @memoized_property
    def target_types(self):
      return [self._object_type]

    def __call__(self, *args, **kwargs):
      name = kwargs.get('name')
      if name and self._serializable:
        # Named serializable objects are captured for the parse result.
        obj = self._object_type(type_alias=self._type_alias, **kwargs)
        cls._objects.append(obj)
        return obj
      else:
        return self._object_type(*args, **kwargs)

  # Compute a single ParseContext for a default path, which we will mutate for each parsed path.
  symbols = {}
  for alias, target_macro_factory in aliases.target_macro_factories.items():
    for target_type in target_macro_factory.target_types:
      symbols[target_type] = TargetAdaptor
  parse_context = ParseContext(rel_path='', type_aliases=symbols)

  for alias, symbol in symbol_table.items():
    registrar = Registrar(alias, symbol)
    symbols[alias] = registrar
    symbols[symbol] = registrar

  if aliases.objects:
    symbols.update(aliases.objects)

  # Compute "per path" symbols (which will all use the same mutable ParseContext).
  # NOTE: the original re-called symbol_table_cls.aliases() here; the result is
  # identical to the `aliases` bound above, so the redundant call was dropped.
  for alias, object_factory in aliases.context_aware_object_factories.items():
    symbols[alias] = object_factory(parse_context)
  for alias, target_macro_factory in aliases.target_macro_factories.items():
    symbols[alias] = target_macro_factory.target_macro(parse_context)
    for target_type in target_macro_factory.target_types:
      symbols[target_type] = TargetAdaptor

  # TODO: Replace builtins for paths with objects that will create wrapped PathGlobs objects.
  symbols['globs'] = Globs
  symbols['rglobs'] = RGlobs
  symbols['zglobs'] = ZGlobs

  return symbols, parse_context
def initialize_parse_state(self, build_file):
  """Creates a fresh parse state for the given build file."""
  registrations = []

  def record(address, addressable):
    # Accumulates every addressable registered while parsing `build_file`.
    registrations.append((address, addressable))

  aliases = self._exposed_objects.copy()
  for alias, addressable_type in self._addressable_alias_map.items():
    aliases[alias] = AddressableCallProxy(addressable_type=addressable_type,
                                          build_file=build_file,
                                          registration_callback=record)

  context = ParseContext(rel_path=build_file.spec_path, type_aliases=aliases)

  # The BUILD file globals are the aliases plus instantiated context-aware objects.
  parse_globals = aliases.copy()
  for alias, factory in self._exposed_context_aware_object_factories.items():
    parse_globals[alias] = factory(context)

  return self.ParseState(registrations, parse_globals)
def initialize_parse_state(self, build_file):
  """Creates a fresh parse state for the given build file.

  :param build_file: The BUILD file to set up a new ParseState for.
  :type build_file: :class:`pants.base.build_file.BuildFile`
  :returns: A fresh ParseState for parsing the given `build_file` with.
  :rtype: :class:`BuildConfiguration.ParseState`
  """
  # TODO(John Sirois): Introduce a factory method to seal the BuildConfiguration and add a check
  # there that all anonymous types are covered by context aware object factories that are
  # Macro instances. Without this, we could have non-Macro context aware object factories being
  # asked to be a BuildFileTargetFactory when they are not (in SourceRoot registration context).
  # See: https://github.com/pantsbuild/pants/issues/2125
  type_aliases = self._exposed_object_by_alias.copy()
  parse_context = ParseContext(rel_path=build_file.spec_path, type_aliases=type_aliases)

  def create_call_proxy(tgt_type, tgt_alias=None):
    # Builds a proxy that registers each manufactured addressable into the
    # parse context's storage under its target name.
    def registration_callback(address, addressable):
      parse_context._storage.add(addressable, name=address.target_name)
    addressable_factory = self._get_addressable_factory(tgt_type, tgt_alias)
    return AddressableCallProxy(addressable_factory=addressable_factory,
                                build_file=build_file,
                                registration_callback=registration_callback)

  # Expose all aliased Target types.
  for alias, target_type in self._target_by_alias.items():
    proxy = create_call_proxy(target_type, alias)
    type_aliases[alias] = proxy

  # Expose aliases for exposed objects and targets in the BUILD file.
  parse_globals = type_aliases.copy()

  # Now its safe to add mappings from both the directly exposed and macro-created target types to
  # their call proxies for context awares and macros to use to manufacture targets by type
  # instead of by alias.
  for alias, target_type in self._target_by_alias.items():
    proxy = type_aliases[alias]
    type_aliases[target_type] = proxy
  for target_macro_factory in self._target_macro_factory_by_alias.values():
    for target_type in target_macro_factory.target_types:
      proxy = create_call_proxy(target_type)
      type_aliases[target_type] = proxy

  # Instantiate context-aware objects with the (now fully populated) parse context.
  for alias, object_factory in self._exposed_context_aware_object_factory_by_alias.items():
    parse_globals[alias] = object_factory(parse_context)

  for alias, target_macro_factory in self._target_macro_factory_by_alias.items():
    parse_globals[alias] = target_macro_factory.target_macro(parse_context)

  return self.ParseState(parse_context, parse_globals)
def initialize_parse_state(self, build_file):
  """Creates a fresh parse state for the given build file."""
  type_aliases = self._exposed_objects.copy()
  registered_addressable_instances = []

  def registration_callback(address, addressable):
    # Accumulates (address, addressable) pairs registered during this parse.
    registered_addressable_instances.append((address, addressable))

  for alias, addressable_type in self._addressable_alias_map.items():
    call_proxy = AddressableCallProxy(addressable_type=addressable_type,
                                      build_file=build_file,
                                      registration_callback=registration_callback)
    type_aliases[alias] = call_proxy

  # Expose aliases for exposed objects and addressables in the BUILD file.
  parse_globals = type_aliases.copy()

  # Now its safe to add concrete addressable type to proxy mappings for context awares to use.
  for alias, addressable_type in self._addressable_alias_map.items():
    target_type = addressable_type.get_target_type()
    proxy = type_aliases[alias]
    type_aliases[target_type] = proxy

  parse_context = ParseContext(rel_path=build_file.spec_path, type_aliases=type_aliases)
  for alias, object_factory in self._exposed_context_aware_object_factories.items():
    parse_globals[alias] = object_factory(parse_context)

  return self.ParseState(registered_addressable_instances, parse_globals)
def __init__(self, address=None, payload=None, sources=None, **kwargs):
  """Creates a Go local-source target, defaulting sources to all non-BUILD files in its dir."""
  if not sources:
    # We grab all files in the current directory except BUILD files for 2 reasons:
    # 1. cgo: If a file imports "C" then it may rely on sibling .c, .cc, etc files that `go build`
    #    will compile.
    # 2. resources: Even though go does not support them; ie by providing a protocol to embed them
    #    in binaries, it does allow them to be placed in a directory where a test might use them
    #    for example via plain old filesystem access.
    globs = Globs(ParseContext(rel_path=address.spec_path, type_aliases={}))
    sources = globs('*',
                    exclude=[globs('BUILD*'),
                             # This skips subdir content.
                             globs('*/**')])
  payload = payload or Payload()
  payload.add_fields({
    'sources': self.create_sources_field(sources=sources,
                                         sources_rel_path=address.spec_path,
                                         key_arg='sources'),
  })
  super(GoLocalSource, self).__init__(address=address, payload=payload, **kwargs)
def test_python_binary_with_entry_point_and_source(self):
  """A `source` that agrees with the entry point is accepted for each entry point form."""
  with ParseContext.temp('src'):
    # Bare module entry point.
    binary1 = PythonBinary(name = 'binary1', entry_point = 'blork', source='blork.py')
    assert binary1.entry_point == 'blork'
    # module:function entry point.
    binary2 = PythonBinary(name = 'binary2', entry_point = 'blork:main', source='blork.py')
    assert binary2.entry_point == 'blork:main'
    # Dotted package.module:function entry point with a matching nested source.
    binary3 = PythonBinary(name = 'binary3', entry_point = 'bin.blork:main',
                           source='bin/blork.py')
    assert binary3.entry_point == 'bin.blork:main'
def __init__(self, target, msg):
  """Creates a target error, deriving a best-effort address when the target has none.

  :param target: The offending target (may lack an `address` attribute).
  :param msg: Description of the problem.
  """
  address = getattr(target, 'address', None)
  if address is None:
    # Fall back to the BUILD file currently being parsed, if any, to give the
    # user some location to look at.
    try:
      location = ParseContext.locate().current_buildfile
    except ParseContext.ContextError:
      location = 'unknown location'
    address = 'unknown target of type %s in %s' % (target.__class__.__name__, location)
  # NOTE(review): super(Exception, self) skips Exception itself in the MRO and
  # dispatches to its base — presumably intentional here, but confirm; the usual
  # form is super(<ThisClass>, self).
  super(Exception, self).__init__('Error with %s: %s' % (address, msg))
def dump(self):
  """Dumps the target closure's libraries, requirements and distributions into the PEX builder.

  :returns: The populated builder.
  """
  self.debug('Building PythonBinary %s:' % self._target)

  targets = self.resolve([self._target] + self._extra_targets)

  for lib in targets['libraries'] | targets['binaries']:
    self._dump_library(lib)

  generated_reqs = OrderedSet()
  if targets['thrifts']:
    # Thrift requirement generation is memoized per thrift target across dumps.
    for thr in set(targets['thrifts']):
      if thr not in self.MEMOIZED_THRIFTS:
        self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
      generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
    with ParseContext.temp():
      # trick pants into letting us add this python requirement, otherwise we get
      # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
      # PythonRequirement(thrift)
      #
      # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
      # should first detect if any explicit thrift requirements have been added and use
      # those. Only if they have not been supplied should we auto-inject it.
      generated_reqs.add(PythonRequirement('thrift', use_2to3=True,
          name='thrift-' + ''.join(random.sample('0123456789abcdef' * 8, 8))))

  for antlr in targets['antlrs']:
    generated_reqs.add(self._generate_antlr_requirement(antlr))
  targets['reqs'] |= generated_reqs

  reqs_to_build = OrderedSet()
  for req in targets['reqs']:
    # Skip requirements filtered out for this interpreter/platform combination.
    if not req.should_build(self._interpreter.python, Platform.current()):
      self.debug('Skipping %s based upon version filter' % req)
      continue
    reqs_to_build.add(req)
    self._dump_requirement(req._requirement, False, req._repository)

  platforms = self._platforms
  if isinstance(self._target, PythonBinary):
    # A binary target's own platform list takes precedence.
    platforms = self._target.platforms
  distributions = resolve_multi(
      self._config,
      reqs_to_build,
      interpreter=self._interpreter,
      platforms=platforms)

  # Dump each resolved distribution exactly once, keyed by location.
  locations = set()
  for platform, dist_set in distributions.items():
    for dist in dist_set:
      if dist.location not in locations:
        self._dump_distribution(dist)
        locations.add(dist.location)

  if len(targets['binaries']) > 1:
    print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

  return self._builder
def get_syms():
  """Returns BUILD-file symbols of interest from the default globals.

  Filters out names listed in PREDEFS and CamelCase names (those starting with
  an uppercase letter).
  """
  vc = ParseContext.default_globals()
  # REMIND see both jvm_binary and JvmBinary??
  return dict((symbol, vc[symbol]) for symbol in vc
              if symbol not in PREDEFS and not symbol[0].isupper())
def _get_symbols(cls, symbol_table_cls): symbol_table = symbol_table_cls.table() # TODO: Nasty escape hatch: see https://github.com/pantsbuild/pants/issues/3561 aliases = symbol_table_cls.aliases() class Registrar(BuildFileTargetFactory): def __init__(self, type_alias, object_type): self._type_alias = type_alias self._object_type = object_type self._serializable = Serializable.is_serializable_type(self._object_type) @memoized_property def target_types(self): return [self._object_type] def __call__(self, *args, **kwargs): name = kwargs.get('name') if name and self._serializable: kwargs['type_alias'] = self._type_alias obj = self._object_type(**kwargs) cls._objects.append(obj) return obj else: return self._object_type(*args, **kwargs) symbols = {} for alias, symbol in symbol_table.items(): registrar = Registrar(alias, symbol) symbols[alias] = registrar symbols[symbol] = registrar if aliases.objects: symbols.update(aliases.objects) # Compute "per path" symbols (which will all use the same mutable ParseContext). # TODO: See https://github.com/pantsbuild/pants/issues/3561 parse_context = ParseContext(rel_path='', type_aliases=symbols) for alias, object_factory in aliases.context_aware_object_factories.items(): symbols[alias] = object_factory(parse_context) for alias, target_macro_factory in aliases.target_macro_factories.items(): underlying_symbol = symbols.get(alias, TargetAdaptor) symbols[alias] = target_macro_factory.target_macro(parse_context) for target_type in target_macro_factory.target_types: symbols[target_type] = Registrar(alias, underlying_symbol) # TODO: Replace builtins for paths with objects that will create wrapped PathGlobs objects. # The strategy for https://github.com/pantsbuild/pants/issues/3560 should account for # migrating these additional captured arguments to typed Sources. symbols['globs'] = Globs symbols['rglobs'] = RGlobs symbols['zglobs'] = ZGlobs symbols['bundle'] = BundleAdaptor return symbols, parse_context
def execute(self):
  """Builds (and optionally runs) the target as a PEX.

  :returns: 0 after writing a .pex with --pex, otherwise the exit code of the run PEX.
  """
  # --pex and --ipython are mutually exclusive build modes; --entry_point would
  # be clobbered by the ipython entry point.
  if self.options.pex and self.options.ipython:
    self.error('Cannot specify both --pex and --ipython!')
  if self.options.entry_point and self.options.ipython:
    self.error('Cannot specify both --entry_point and --ipython!')

  if self.options.verbose:
    print('Build operating on target: %s %s' %
          (self.target,
           'Extra targets: %s' % ' '.join(map(str, self.extra_targets))
           if self.extra_targets else ''))

  builder = PEXBuilder(tempfile.mkdtemp(), interpreter=self.interpreter,
                       pex_info=self.target.pexinfo
                       if isinstance(self.target, PythonBinary) else None)

  if self.options.entry_point:
    builder.set_entry_point(self.options.entry_point)

  if self.options.ipython:
    # IPython mode: entry point and extra requirements come from pants.ini.
    if not self.config.has_section('python-ipython'):
      self.error('No python-ipython sections defined in your pants.ini!')
    builder.info.entry_point = self.config.get('python-ipython', 'entry_point')
    if builder.info.entry_point is None:
      self.error('Must specify entry_point for IPython in the python-ipython section '
                 'of your pants.ini!')
    requirements = self.config.getlist('python-ipython', 'requirements', default=[])
    with ParseContext.temp():
      for requirement in requirements:
        self.extra_targets.append(PythonRequirement(requirement))

  executor = PythonChroot(
      self.target,
      self.root_dir,
      builder=builder,
      interpreter=self.interpreter,
      extra_targets=self.extra_targets,
      conn_timeout=self.options.conn_timeout)
  executor.dump()

  if self.options.pex:
    # Write the frozen PEX to dist/ and exit.
    pex_name = os.path.join(self.root_dir, 'dist', '%s.pex' % self.target.name)
    builder.build(pex_name)
    print('Wrote %s' % pex_name)
    return 0
  else:
    builder.freeze()
    pex = PEX(builder.path(), interpreter=self.interpreter)
    po = pex.run(args=list(self.args), blocking=False)
    try:
      return po.wait()
    except KeyboardInterrupt:
      # Forward Ctrl-C to the child before propagating.
      po.send_signal(signal.SIGINT)
      raise
def create_dependencies(depmap):
  """Builds one python_library per depmap entry, wired with the listed dependencies.

  :param depmap: Mapping of target name -> iterable of dependency target names.
  :returns: Mapping of target name -> created python_library target.
  """
  with ParseContext.temp():
    return dict(
      (name,
       python_library(
         name=name,
         provides=setup_py(name=name, version='0.0.0'),
         dependencies=[pants(':%s' % dep) for dep in deps]))
      for name, deps in depmap.items())
def _instantiate_jvm_app(self, kwargs):
  """For JvmApp target, convert BundleAdaptor to BundleProps."""
  bundle_factory = Bundle(ParseContext(kwargs['address'].spec_path, dict()))
  kwargs['bundles'] = list(map(bundle_factory.create_bundle_props, kwargs['bundles']))
  return JvmApp(build_graph=self, **kwargs)
def _instantiate_app(self, target_cls: Type[TargetV1], kwargs) -> TargetV1:
  """For App targets, convert BundleAdaptor to BundleProps."""
  bundle_factory = Bundle(ParseContext(kwargs["address"].spec_path, dict()))
  kwargs["bundles"] = list(map(bundle_factory.create_bundle_props, kwargs["bundles"]))
  return target_cls(build_graph=self, **kwargs)
def test_python_binary_with_entry_point_and_source_mismatch(self):
  """A `source` that disagrees with `entry_point` raises TargetDefinitionException."""
  with ParseContext.temp('src'):
    # Each case pairs an entry point with a source file that cannot satisfy it.
    mismatches = [
      dict(name='binary1', entry_point='blork', source='hork.py'),
      dict(name='binary2', entry_point='blork:main', source='hork.py'),
      dict(name='binary3', entry_point='bin.blork', source='blork.py'),
      dict(name='binary4', entry_point='bin.blork', source='bin.py'),
    ]
    for kwargs in mismatches:
      with pytest.raises(TargetDefinitionException):
        PythonBinary(**kwargs)
def __init__(self, name, dependencies=None, num_sources=0, exclusives=None):
  """Creates a test double combining InternalTarget and TargetWithSources behavior."""
  # Construct inside a temporary parse context so no real BUILD file is required.
  with ParseContext.temp():
    InternalTarget.__init__(self, name, dependencies, exclusives=exclusives)
    TargetWithSources.__init__(self, name, exclusives=exclusives)
  self.num_sources = num_sources
  self.declared_exclusives = defaultdict(set)
  if exclusives is not None:
    for k in exclusives:
      self.declared_exclusives[k] = set([exclusives[k]])
  # Resolved exclusives are computed lazily elsewhere; start unresolved.
  self.exclusives = None
def generate_test_targets(cls):
  """Returns the memoized list of PythonRequirement targets needed to run tests.

  The list is built once per class and cached in cls.TESTING_TARGETS.
  """
  if cls.TESTING_TARGETS is None:
    with ParseContext.temp():
      cls.TESTING_TARGETS = [
        PythonRequirement('pytest'),
        PythonRequirement('pytest-cov'),
        PythonRequirement('coverage==3.6b1'),
        # unittest2 backports are interpreter-specific: pick by python major version.
        PythonRequirement('unittest2', version_filter=lambda py, pl: py.startswith('2')),
        PythonRequirement('unittest2py3k', version_filter=lambda py, pl: py.startswith('3'))
      ]
  return cls.TESTING_TARGETS
def test_validation(self):
  """Artifact rejects a wrong-typed value for any of its constructor arguments."""
  with ParseContext.temp():
    repo = Repository(name="myRepo", url="myUrl", push_db="myPushDb")
    # A fully well-typed artifact constructs cleanly.
    Artifact(org="testOrg", name="testName", repo=repo, description="Test")
    # Corrupting any single argument must raise ValueError.
    good_kwargs = dict(org="testOrg", name="testName", repo=repo, description="Test")
    for field in ('org', 'name', 'repo', 'description'):
      bad_kwargs = dict(good_kwargs, **{field: 1})
      self.assertRaises(ValueError, Artifact, **bad_kwargs)
def _generate_requirement(self, library, builder_cls):
  """Builds (or reuses a cached) sdist for `library` and wraps it as a PythonRequirement.

  :param library: The library target to build a requirement for.
  :param builder_cls: Builder class used to produce the sdist.
  :returns: A PythonRequirement pointing at the per-target egg cache directory.
  """
  library_key = self._key_generator.key_for_target(library)
  builder = builder_cls(library, self._root, self._config, '-' + library_key.hash[:8])

  cache_dir = os.path.join(self._egg_cache_root, library_key.id)
  # Only rebuild when the invalidator says the target's fingerprint changed.
  if self._build_invalidator.needs_update(library_key):
    sdist = builder.build(interpreter=self._interpreter)
    safe_mkdir(cache_dir)
    shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
    self._build_invalidator.update(library_key)

  with ParseContext.temp():
    return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)
def test_on_context_exit(self):
  """on_context_exit() fails outside a parse and defers its action until the parse ends."""
  with temporary_dir() as root_dir:
    parse_context = ParseContext(create_buildfile(root_dir, 'a'))
    # Registering an exit action outside an active parse is an error.
    with pytest.raises(parse_context.ContextError):
      parse_context.on_context_exit(lambda: 37)

  with temporary_dir() as root_dir:
    # The BUILD file registers a delayed action that writes a sibling file 'b';
    # the trailing assert inside the BUILD file proves it has not run yet at parse time.
    buildfile = create_buildfile(root_dir, 'a', content=dedent("""
      import os
      from pants.base.parse_context import ParseContext

      def leave_a_trail(file, contents=''):
        with open(file, 'w') as b:
          b.write(contents)

      b_file = os.path.join(os.path.dirname(__file__), 'b')
      ParseContext.locate().on_context_exit(leave_a_trail, b_file, contents='42')
      assert not os.path.exists(b_file), 'Expected context exit action to be delayed.'
    """).strip()
    )
    b_file = os.path.join(root_dir, 'a', 'b')
    self.assertFalse(os.path.exists(b_file))
    ParseContext(buildfile).parse()
    # After the parse completes, the deferred action must have fired.
    with open(b_file, 'r') as b:
      self.assertEquals('42', b.read())
def _generate_symbols(
  build_root: str,
  target_type_aliases: Iterable[str],
  object_aliases: BuildFileAliases,
  use_deprecated_python_macros: bool,
) -> tuple[dict[str, Any], ParseState]:
  """Builds the BUILD-file symbol dict and the thread-local ParseState backing it."""
  # N.B.: We re-use the thread local ParseState across symbols for performance reasons.
  # This allows a single construction of all symbols here that can be re-used for each BUILD
  # file parse with a reset of the ParseState for the calling thread.
  parse_state = ParseState()

  class Registrar:
    def __init__(self, type_alias: str) -> None:
      self._type_alias = type_alias

    def __call__(self, **kwargs: Any) -> TargetAdaptor:
      # Target names default to the name of the directory their BUILD file is in
      # (as long as it's not the root directory).
      if "name" not in kwargs:
        dirname = os.path.basename(parse_state.rel_path())
        if not dirname:
          raise UnaddressableObjectError(
            "Targets in root-level BUILD files must be named explicitly."
          )
        kwargs["name"] = dirname
      target_adaptor = TargetAdaptor(self._type_alias, **kwargs)
      parse_state.add(target_adaptor)
      return target_adaptor

  symbols: dict[str, Any] = {
    **object_aliases.objects,
    "build_file_dir": lambda: PurePath(parse_state.rel_path()),
  }
  # When deprecated python macros are enabled they claim the ambiguous aliases,
  # so the plain target registrars must not shadow them.
  symbols.update(
    (alias, Registrar(alias))
    for alias in target_type_aliases
    if not use_deprecated_python_macros or alias not in _AMBIGUOUS_PYTHON_MACRO_SYMBOLS
  )

  parse_context = ParseContext(
    build_root=build_root, type_aliases=symbols, rel_path_oracle=parse_state
  )
  for alias, object_factory in object_aliases.context_aware_object_factories.items():
    if use_deprecated_python_macros or alias not in _AMBIGUOUS_PYTHON_MACRO_SYMBOLS:
      symbols[alias] = object_factory(parse_context)

  return symbols, parse_state
def test_validation(self):
  """PythonTarget accepts a PythonArtifact (or None) provides, but rejects a JVM Artifact."""
  with ParseContext.temp('PythonTargetTest/test_validation'):
    # Adding a JVM Artifact as a provides on a PythonTarget doesn't make a lot of sense. This test
    # sets up that very scenario, and verifies that pants throws a TargetDefinitionException.
    self.assertRaises(TargetDefinitionException, PythonTarget, name="one", sources=[],
                      provides=Artifact(org='com.twitter', name='one-jar',
                                        repo=Repository(name='internal', url=None,
                                                        push_db=None, exclusives=None)))

    name = "test-with-PythonArtifact"
    pa = PythonArtifact(name='foo', version='1.0', description='foo')

    # This test verifies that adding a 'setup_py' provides to a PythonTarget is okay.
    self.assertEquals(PythonTarget(name=name, provides=pa, sources=[]).name, name)

    name = "test-with-none"
    # This test verifies that having no provides is okay.
    self.assertEquals(PythonTarget(name=name, provides=None, sources=[]).name, name)
def _generate_symbols( target_type_aliases: Iterable[str], object_aliases: BuildFileAliases, ) -> Tuple[Dict[str, Any], ParseContext]: symbols: Dict[str, Any] = {} # Compute "per path" symbols. For performance, we use the same ParseContext, which we # mutate to set the rel_path appropriately before it's actually used. This allows this # method to reuse the same symbols for all parses. Meanwhile, we set the rel_path to None, # so that we get a loud error if anything tries to use it before it's set. # TODO: See https://github.com/pantsbuild/pants/issues/3561 parse_context = ParseContext(rel_path=None, type_aliases=symbols) class Registrar: def __init__(self, parse_context: ParseContext, type_alias: str): self._parse_context = parse_context self._type_alias = type_alias def __call__(self, *args, **kwargs): # Target names default to the name of the directory their BUILD file is in # (as long as it's not the root directory). if "name" not in kwargs: dirname = os.path.basename(self._parse_context.rel_path) if not dirname: raise UnaddressableObjectError( "Targets in root-level BUILD files must be named explicitly." ) kwargs["name"] = dirname kwargs.setdefault("type_alias", self._type_alias) target_adaptor = TargetAdaptor(**kwargs) self._parse_context._storage.add(target_adaptor) return target_adaptor symbols.update({ alias: Registrar(parse_context, alias) for alias in target_type_aliases }) symbols.update(object_aliases.objects) for alias, object_factory in object_aliases.context_aware_object_factories.items( ): symbols[alias] = object_factory(parse_context) return symbols, parse_context
def test_binary_cycle(self):
  """A library whose provided binary depends back on the library is a reported cycle."""
  with ParseContext.temp():
    foo = python_library(
      name = 'foo',
      provides = setup_py(
        name = 'foo',
        version = '0.0.0',
      ).with_binaries(
        foo_binary = pants(':foo_bin')
      )
    )
    # foo_bin depends on :foo, closing the cycle foo -> foo_bin -> foo.
    foo_bin = python_binary(
      name = 'foo_bin',
      entry_point = 'foo.bin.foo',
      dependencies = [
        pants(':foo')
      ]
    )
    with pytest.raises(TargetDefinitionException):
      SetupPy.minified_dependencies(foo)
def _per_path_symbol_factory(cls, path, aliases, global_symbols):
  """Creates the symbols for parsing one BUILD file at `path`.

  :param path: Path of the BUILD file being parsed.
  :param aliases: The BuildFileAliases providing macro and object factories.
  :param global_symbols: Base symbol dict to extend with per-path entries.
  :returns: A dict of symbols to expose to the BUILD file.
  """
  per_path_symbols = {}

  symbols = global_symbols.copy()
  # Placeholder mapping so macros can reference target types before the
  # per-path macros below are constructed.
  for alias, target_macro_factory in aliases.target_macro_factories.items():
    for target_type in target_macro_factory.target_types:
      symbols[target_type] = Target

  parse_context = ParseContext(rel_path=os.path.dirname(path), type_aliases=symbols)

  for alias, object_factory in aliases.context_aware_object_factories.items():
    per_path_symbols[alias] = object_factory(parse_context)

  for alias, target_macro_factory in aliases.target_macro_factories.items():
    target_macro = target_macro_factory.target_macro(parse_context)
    per_path_symbols[alias] = target_macro
    # Expose the macro under each of its concrete target types as well.
    for target_type in target_macro_factory.target_types:
      per_path_symbols[target_type] = target_macro

  return per_path_symbols
def python_requirements(requirements_relpath='requirements.txt'): """Translates a pip requirements file into an equivalent set of PythonRequirement targets. NB that there are some requirements files that can't be unambiguously translated; ie: multiple find links. For these files a ValueError will be raised that points out the issue. See the requirements file spec here: http://www.pip-installer.org/en/1.1/requirements.html :param string requirements_relpath: The relative path from the parent dir of the BUILD file using this function to the requirements file. By default a `requirements.txt` file sibling to the BUILD file is assumed. """ # TODO(John Sirois): Rework this when Patrick's target re-work branch lands - it may need special # handling. requirements = [] repository = None build_file = ParseContext.locate().current_buildfile requirements_path = os.path.join(build_file.parent_path, requirements_relpath) with open(requirements_path) as fp: for line in fp: line = line.strip() if line and not line.startswith('#'): if not line.startswith('-'): requirements.append(line) else: # handle flags we know about flag_value = line.split(' ', 1) if len(flag_value) == 2: flag = flag_value[0].strip() value = flag_value[1].strip() if flag in ('-f', '--find-links'): if repository is not None: raise ValueError('Only 1 --find-links url is supported per requirements file') repository = value for requirement in requirements: PythonRequirement(requirement, repository=repository)
def test_binary_target_injected_into_minified_dependencies_with_provider(self):
  """When the injected binary's dep has its own provides, recursion runs setup.py for it too."""
  with ParseContext.temp():
    bar = python_library(
      name = 'bar',
      provides = setup_py(
        name = 'bar',
        version = '0.0.0',
      ).with_binaries(
        bar_binary = pants(':bar_bin')
      )
    )
    bar_bin = python_binary(
      name = 'bar_bin',
      entry_point = 'bar.bin.bar',
      dependencies = [
        pants(':bar_bin_dep')
      ]
    )
    # Unlike the non-provider test, this dep itself provides a setup_py.
    bar_bin_dep = python_library(
      name = 'bar_bin_dep',
      provides = setup_py(
        name = 'bar_bin_dep',
        version = '0.0.0',
      )
    )
    assert SetupPy.minified_dependencies(bar) == OrderedSet([bar_bin, bar_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(bar))
    assert entry_points == {'bar_binary': 'bar.bin.bar'}

    with self.run_execute(bar, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(bar)

    # Recursive execution must also run setup.py for the providing dependency.
    with self.run_execute(bar, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_has_calls([
        call(bar),
        call(bar_bin_dep)
      ], any_order=True)
def __init__(self, base=None, mapper=None, relative_to=None):
  """
  :param base: Base path of the "source" file paths. By default, path of the BUILD file.
    Useful for assets that don't live in the source code repo.
  :param mapper: Function that takes a path string and returns a path string. Takes a path in
    the source tree, returns a path to use in the resulting bundle. By default, an identity
    mapper.
  :param string relative_to: Set up a simple mapping from source path to bundle path.
    E.g., ``relative_to='common'`` removes that prefix from all files in the application bundle.
  :raises ValueError: If both `mapper` and `relative_to` are given, or `relative_to` does not
    name an existing directory.
  """
  # mapper and relative_to are alternative ways to specify the same mapping.
  if mapper and relative_to:
    raise ValueError("Must specify exactly one of 'mapper' or 'relative_to'")

  self._base = base or ParseContext.path()
  if relative_to:
    base = os.path.join(self._base, relative_to)
    if not os.path.isdir(base):
      raise ValueError('Could not find a directory to bundle relative to at %s' % base)
    self.mapper = RelativeToMapper(base)
  else:
    # Default to the identity-style mapping relative to the bundle base.
    self.mapper = mapper or RelativeToMapper(self._base)

  # Maps absolute source paths to their in-bundle relative paths.
  self.filemap = {}
def __init__(self, spec, exclusives=None): """ :param string spec: target address. E.g., `src/java/com/twitter/common/util/BUILD\:util` """ # it's critical the spec is parsed 1st, the results are needed elsewhere in constructor flow parse_context = ParseContext.locate() def parse_address(): if spec.startswith(':'): # the :[target] could be in a sibling BUILD - so parse using the canonical address pathish = "%s:%s" % (parse_context.buildfile.canonical_relpath, spec[1:]) return Address.parse(parse_context.buildfile.root_dir, pathish, False) else: return Address.parse(parse_context.buildfile.root_dir, spec, False) try: self.address = parse_address() except IOError as e: self.address = parse_context.buildfile.relpath raise TargetDefinitionException(self, '%s%s' % (self._DEFINITION_ERROR_MSG, e)) # We must disable the re-init check, because our funky __getattr__ breaks it. # We're not involved in any multiple inheritance, so it's OK to disable it here. super(Pants, self).__init__(self.address.target_name, reinit_check=False, exclusives=exclusives)
def __init__(self, name, reinit_check=True, exclusives=None): """ :param string name: The target name. """ # See "get_all_exclusives" below for an explanation of the exclusives parameter. # This check prevents double-initialization in multiple-inheritance situations. # TODO(John Sirois): fix target inheritance - use super() to linearize or use alternatives to # multiple inheritance. if not reinit_check or not hasattr(self, '_initialized'): if not isinstance(name, Compatibility.string): self.address = '%s:%s' % (ParseContext.locate().current_buildfile, str(name)) raise TargetDefinitionException(self, "Invalid target name: %s" % name) self.name = name self.description = None self.address = self._locate() # TODO(John Sirois): Transition all references to self.identifier to eliminate id builtin # ambiguity self.id = self._create_id() self._register() self.labels = set() self._initialized = True self.declared_exclusives = collections.defaultdict(set) if exclusives is not None: for k in exclusives: self.declared_exclusives[k].add(exclusives[k]) self.exclusives = None # For synthetic codegen targets this will be the original target from which # the target was synthesized. self._derived_from = self
def union(cls, targets, name=None):
  """Creates a synthetic target of this type depending on all of ``targets``."""
  union_name = name or (cls.synthetic_name(targets) + '-union')
  with ParseContext.temp():
    return cls(union_name, dependencies=targets)
def _locate(self):
  """Derives this target's address from the BUILD file currently being parsed."""
  context = ParseContext.locate()
  return Address(context.current_buildfile, self.name)
def _post_construct(self, func, *args, **kwargs):
  """Registers a command to invoke after this target's BUILD file is parsed."""
  context = ParseContext.locate()
  context.on_context_exit(func, *args, **kwargs)
def _bundle(rel_path):
  """Builds a Bundle bound to a throwaway ParseContext rooted at ``rel_path``."""
  context = ParseContext(rel_path=rel_path, type_aliases={})
  return Bundle(context)
def _generate_symbols(
    symbol_table: SymbolTable,
    aliases: BuildFileAliases,
) -> Tuple[Dict, ParseContext]:
    """Builds the symbol dict exposed to BUILD files, plus the shared ParseContext."""
    symbols: Dict = {}

    # Compute "per path" symbols. For performance, we use the same ParseContext, which we
    # mutate (in a critical section) to set the rel_path appropriately before it's actually used.
    # This allows this method to reuse the same symbols for all parses. Meanwhile we set the
    # rel_path to None, so that we get a loud error if anything tries to use it before it's set.
    # TODO: See https://github.com/pantsbuild/pants/issues/3561
    parse_context = ParseContext(rel_path=None, type_aliases=symbols)

    class Registrar(BuildFileTargetFactory):
        def __init__(self, parse_context, type_alias, object_type):
            self._parse_context = parse_context
            self._type_alias = type_alias
            self._object_type = object_type
            self._serializable = Serializable.is_serializable_type(self._object_type)

        @memoized_property
        def target_types(self):
            return [self._object_type]

        def __call__(self, *args, **kwargs):
            # Target names default to the name of the directory their BUILD file is in
            # (as long as it's not the root directory).
            if "name" not in kwargs and issubclass(self._object_type, TargetAdaptor):
                dirname = os.path.basename(self._parse_context.rel_path)
                if not dirname:
                    raise UnaddressableObjectError(
                        "Targets in root-level BUILD files must be named explicitly."
                    )
                kwargs["name"] = dirname
            name = kwargs.get("name")
            if name and self._serializable:
                kwargs.setdefault("type_alias", self._type_alias)
                obj = self._object_type(**kwargs)
                self._parse_context._storage.add(obj)
                return obj
            return self._object_type(*args, **kwargs)

    # Map both the alias and the type itself to a shared Registrar.
    for alias, symbol in symbol_table.table.items():
        registrar = Registrar(parse_context, alias, symbol)
        symbols[alias] = registrar
        symbols[symbol] = registrar

    if aliases.objects:
        symbols.update(aliases.objects)
    for alias, object_factory in aliases.context_aware_object_factories.items():
        symbols[alias] = object_factory(parse_context)

    for alias, target_macro_factory in aliases.target_macro_factories.items():
        # Preserve whatever the alias already mapped to so the macro's target types
        # still construct the right underlying adaptor.
        underlying_symbol = symbols.get(alias, TargetAdaptor)
        symbols[alias] = target_macro_factory.target_macro(parse_context)
        for target_type in target_macro_factory.target_types:
            symbols[target_type] = Registrar(parse_context, alias, underlying_symbol)

    symbols["bundle"] = BundleAdaptor

    return symbols, parse_context
def _generate_symbols(symbol_table, aliases):
    """Builds the symbol dict exposed to BUILD files, plus the shared ParseContext."""
    symbols = {}

    # Compute "per path" symbols. For performance, we use the same ParseContext, which we
    # mutate (in a critical section) to set the rel_path appropriately before it's actually used.
    # This allows this method to reuse the same symbols for all parses. Meanwhile we set the
    # rel_path to None, so that we get a loud error if anything tries to use it before it's set.
    # TODO: See https://github.com/pantsbuild/pants/issues/3561
    parse_context = ParseContext(rel_path=None, type_aliases=symbols)

    class Registrar(BuildFileTargetFactory):
        def __init__(self, parse_context, type_alias, object_type):
            self._parse_context = parse_context
            self._type_alias = type_alias
            self._object_type = object_type
            self._serializable = Serializable.is_serializable_type(self._object_type)

        @memoized_property
        def target_types(self):
            return [self._object_type]

        def __call__(self, *args, **kwargs):
            # Target names default to the name of the directory their BUILD file is in
            # (as long as it's not the root directory).
            if 'name' not in kwargs and issubclass(self._object_type, TargetAdaptor):
                dirname = os.path.basename(self._parse_context.rel_path)
                if not dirname:
                    raise UnaddressableObjectError(
                        'Targets in root-level BUILD files must be named explicitly.'
                    )
                kwargs['name'] = dirname
            name = kwargs.get('name')
            if name and self._serializable:
                kwargs.setdefault('type_alias', self._type_alias)
                obj = self._object_type(**kwargs)
                self._parse_context._storage.add(obj)
                return obj
            return self._object_type(*args, **kwargs)

    # Map both the alias and the type itself to a shared Registrar.
    for alias, symbol in symbol_table.table.items():
        registrar = Registrar(parse_context, alias, symbol)
        symbols[alias] = registrar
        symbols[symbol] = registrar

    if aliases.objects:
        symbols.update(aliases.objects)
    for alias, object_factory in aliases.context_aware_object_factories.items():
        symbols[alias] = object_factory(parse_context)

    for alias, target_macro_factory in aliases.target_macro_factories.items():
        # Preserve whatever the alias already mapped to so the macro's target types
        # still construct the right underlying adaptor.
        underlying_symbol = symbols.get(alias, TargetAdaptor)
        symbols[alias] = target_macro_factory.target_macro(parse_context)
        for target_type in target_macro_factory.target_types:
            symbols[target_type] = Registrar(parse_context, alias, underlying_symbol)

    # TODO: Replace builtins for paths with objects that will create wrapped PathGlobs objects.
    # The strategy for https://github.com/pantsbuild/pants/issues/3560 should account for
    # migrating these additional captured arguments to typed Sources.
    class GlobWrapper:
        def __init__(self, parse_context, glob_type):
            self._parse_context = parse_context
            self._glob_type = glob_type

        def __call__(self, *args, **kwargs):
            # Bind the glob to the BUILD file's directory at call time.
            return self._glob_type(*args, spec_path=self._parse_context.rel_path, **kwargs)

    symbols['globs'] = GlobWrapper(parse_context, Globs)
    symbols['rglobs'] = GlobWrapper(parse_context, RGlobs)
    symbols['zglobs'] = GlobWrapper(parse_context, ZGlobs)

    symbols['bundle'] = BundleAdaptor

    return symbols, parse_context
def _globs(rel_path):
  """Builds a Globs bound to a throwaway ParseContext rooted at ``rel_path``."""
  context = ParseContext(rel_path=rel_path, type_aliases={})
  return Globs(context)
def __init__(self, run_tracker, root_dir, parser, argv):
  """Parses command-line args into a single runnable binary target plus extra targets.

  :param run_tracker: The RunTracker instance passed through to ``Command``.
  :param string root_dir: The build root used to parse target addresses.
  :param parser: The option parser passed through to ``Command``.
  :param argv: The raw argument vector passed through to ``Command``.
  """
  Command.__init__(self, run_tracker, root_dir, parser, argv)

  self.target = None
  self.extra_targets = []
  self.config = Config.load()

  self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
  self.interpreter_cache.setup()
  interpreters = self.interpreter_cache.select_interpreter(
    list(self.interpreter_cache.matches([self.options.interpreter]
        if self.options.interpreter else [b''])))
  if len(interpreters) != 1:
    self.error('Unable to detect suitable interpreter.')
  self.interpreter = interpreters[0]

  for req in self.options.extra_requirements:
    with ParseContext.temp():
      self.extra_targets.append(PythonRequirement(req, use_2to3=True))

  # We parse each arg in the context of the cli usage:
  #   ./pants command (options) [spec] (build args)
  #   ./pants command (options) [spec]... -- (build args)
  # Our command token and our options are parsed out so we see args of the form:
  #   [spec] (build args)
  #   [spec]... -- (build args)
  binaries = []
  for _ in range(len(self.args)):
    arg = self.args.pop(0)
    if arg == '--':
      break

    def not_a_target(debug_msg):
      # BUG FIX: this previously formatted the (possibly unbound) loop-local `e`
      # instead of the debug_msg parameter, raising NameError on the
      # "Unrecognized target" path.
      self.debug('Not a target, assuming option: %s.' % debug_msg)
      # We failed to parse the arg as a target or else it was in valid address format but did not
      # correspond to a real target.  Assume this is the 1st of the build args and terminate
      # processing args for target addresses.
      self.args.insert(0, arg)

    target = None
    try:
      address = Address.parse(root_dir, arg)
      target = Target.get(address)
      if target is None:
        not_a_target(debug_msg='Unrecognized target')
        break
    except Exception as e:
      not_a_target(debug_msg=e)
      break

    # Split the resolved closure into runnable binaries vs. plain dependencies.
    for resolved in filter(lambda t: t.is_concrete, target.resolve()):
      if isinstance(resolved, PythonBinary):
        binaries.append(resolved)
      else:
        self.extra_targets.append(resolved)

  if len(binaries) == 0:
    # treat as a chroot
    pass
  elif len(binaries) == 1:
    # We found a binary and are done, the rest of the args get passed to it
    self.target = binaries[0]
  else:
    self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
      arg,
      len(binaries),
      '\n\t'.join(str(binary.address) for binary in binaries)
    ))

  if self.target is None:
    if not self.extra_targets:
      self.error('No valid target specified!')
    self.target = self.extra_targets.pop(0)
def _create_new_target(self, target_base, target_type, *args, **kwargs):
  """Registers ``target_base`` as a source root for ``target_type`` and builds one there."""
  if not os.path.exists(target_base):
    os.makedirs(target_base)
  SourceRoot.register(target_base, target_type)
  # Instantiate within a temporary parse context rooted at the new source base.
  with ParseContext.temp(target_base):
    return target_type(*args, **kwargs)
def resolve(self, spec):
  """Returns an iterator over the target(s) the given address points to."""
  with ParseContext.temp():
    pointer = Pants(spec)
    return pointer.resolve()
def of(cls, target):
  """Wraps ``target`` in a new instance of this type that depends only on it."""
  with ParseContext.temp():
    return cls(target.name, dependencies=[target])