def test_locate(self):
  """ParseContext.locate() fails outside any context and tracks the innermost active context."""
  # Outside an active BUILD parse there is nothing to locate.
  with pytest.raises(ContextError):
    ParseContext.locate()
  with temporary_dir() as root_dir:
    a_context = ParseContext(create_buildfile(root_dir, 'a'))
    b_context = ParseContext(create_buildfile(root_dir, 'b'))

    def test_in_a():
      # While a_context is active, locate() resolves to it.
      self.assertEquals(a_context, ParseContext.locate())
      # Nest b_context inside a_context; the nested body should see b_context.
      return b_context.do_in_context(lambda: ParseContext.locate())

    self.assertEquals(b_context, a_context.do_in_context(test_in_a))
def test_python_binary_with_source_no_entry_point(self):
  """With a source but no explicit entry point, the entry point is derived from the file path."""
  with ParseContext.temp('src'):
    top_level = PythonBinary(name='binary1', source='blork.py')
    nested = PythonBinary(name='binary2', source='bin/blork.py')
    # 'blork.py' -> module 'blork'; 'bin/blork.py' -> dotted module 'bin.blork'.
    assert top_level.entry_point == 'blork'
    assert nested.entry_point == 'bin.blork'
def find(target):
  """Finds the source root for the given target.

  If none is registered, the parent directory of the target's BUILD file is returned.
  """
  target_path = os.path.relpath(target.address.buildfile.parent_path, get_buildroot())

  def _find():
    # Walk the target's type MRO so a root registered against a base type also
    # applies to its subtypes; returns None when no registered root matches.
    for typ in target.__class__.mro():
      for root in SourceRoot._ROOTS_BY_TYPE.get(typ, ()):
        if target_path.startswith(root):
          return root

  # Try already registered roots
  root = _find()
  if root:
    return root

  # Fall back to searching the ancestor path for a root
  for buildfile in reversed(target.address.buildfile.ancestors()):
    if buildfile not in SourceRoot._SEARCHED:
      # Each ancestor BUILD file is parsed at most once per run; parsing may
      # register new roots as a side effect, so re-check after each parse.
      SourceRoot._SEARCHED.add(buildfile)
      ParseContext(buildfile).parse()
      root = _find()
      if root:
        return root

  # Finally, resolve files relative to the BUILD file parent dir as the target base
  return target_path
def compiled_idl(cls, idl_dep, generated_deps=None, compiler=None, language=None,
                 namespace_map=None):
  """Marks a jar as containing IDL files that should be fetched and processed locally.

  idl_dep: A dependency resolvable to a single jar library.
  generated_deps: Dependencies for the code that will be generated from "idl_dep"
  compiler: The thrift compiler to apply to the fetched thrift IDL files.
  language: The language to generate code for - supported by some compilers
  namespace_map: A mapping from IDL declared namespaces to custom namespaces - supported by some
      compilers.
  """
  deps = list(filter(is_concrete, idl_dep.resolve()))
  if not len(deps) == 1:
    raise TaskError(
        'Can only arrange for compiled idl for a single dependency at a time, '
        'given:\n\t%s' % '\n\t'.join(map(str, deps)))
  jar = deps.pop()
  if not isinstance(jar, JarDependency):
    raise TaskError(
        'Can only arrange for compiled idl from a jar dependency, given: %s' % jar)
  # Memoization key: same jar/compiler/language (and namespace mapping) reuses the placeholder.
  request = (jar, compiler, language)
  namespace_signature = None
  if namespace_map:
    sha = hashlib.sha1()
    # Sorted so the digest is stable regardless of dict iteration order.
    # NOTE(review): sha.update() requires bytes on Python 3 -- this assumes str keys/values
    # on Python 2; confirm before porting.
    for ns_from, ns_to in sorted(namespace_map.items()):
      sha.update(ns_from)
      sha.update(ns_to)
    namespace_signature = sha.hexdigest()
    request += (namespace_signature, )
  if request not in cls._PLACEHOLDER_BY_REQUEST:
    if not cls._EXTRACT_BASE:
      # Lazily create and register the extraction workdir as a thrift source root.
      config = Config.load()
      cls._EXTRACT_BASE = config.get('idl-extract', 'workdir')
      safe_mkdir(cls._EXTRACT_BASE)
      SourceRoot.register(cls._EXTRACT_BASE, JavaThriftLibrary)
    with ParseContext.temp(cls._EXTRACT_BASE):
      # TODO(John Sirois): abstract ivy specific configurations notion away
      jar._configurations.append('idl')
      jar.with_artifact(configuration='idl', classifier='idl')
      target_name = '-'.join(
          filter(None, (jar.id, compiler, language, namespace_signature)))
      placeholder = JavaThriftLibrary(target_name,
                                      sources=None,
                                      dependencies=[jar] + (generated_deps or []),
                                      compiler=compiler,
                                      language=language,
                                      namespace_map=namespace_map)
      cls._PLACEHOLDER_BY_REQUEST[request] = placeholder
      cls._PLACEHOLDERS_BY_JAR[jar].append(placeholder)
  return cls._PLACEHOLDER_BY_REQUEST[request]
def create_target(category, target_name, target_index, targets):
  """Aggregate `targets` into one synthetic target of the categorized type under `base`."""
  def qualified_name(suffix):
    return "%s-%s-%d" % (target_name, suffix, target_index)

  # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
  # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
  # so that tests can house test IDL in tests/
  target_type, base = category
  # Suffix table for the types that only forward buildflags.
  buildflags_only = {JavaProtobufLibrary: 'protobuf',
                     JavaThriftLibrary: 'thrift',
                     JavaTests: 'java-tests',
                     ScalaTests: 'scala-tests'}
  with ParseContext.temp(base):
    if target_type in buildflags_only:
      return _aggregate(target_type, qualified_name(buildflags_only[target_type]), targets,
                        buildflags=buildflags)
    elif target_type == AnnotationProcessor:
      return _aggregate(AnnotationProcessor, qualified_name('apt'), targets)
    elif target_type in (JavaLibrary, ScalaLibrary):
      suffix = 'java' if target_type == JavaLibrary else 'scala'
      return _aggregate(target_type, qualified_name(suffix), targets, deployjar, buildflags)
    else:
      raise Exception("Cannot aggregate targets of type: %s" % target_type)
def test_binary_target_injected_into_minified_dependencies(self):
  """A binary declared via with_binaries is pulled into its provider's minified deps."""
  with ParseContext.temp():
    foo = python_library(
      name = 'foo',
      provides = setup_py(
        name = 'foo',
        version = '0.0.0',
      ).with_binaries(
        foo_binary = pants(':foo_bin')
      )
    )
    foo_bin = python_binary(
      name = 'foo_bin',
      entry_point = 'foo.bin.foo',
      dependencies = [ pants(':foo_bin_dep') ]
    )
    foo_bin_dep = python_library( name = 'foo_bin_dep' )
    # Both the binary and its transitive dependency are injected.
    assert SetupPy.minified_dependencies(foo) == OrderedSet([foo_bin, foo_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(foo))
    assert entry_points == {'foo_binary': 'foo.bin.foo'}
    # foo_bin_dep has no provides, so only foo is run in both modes.
    with self.run_execute(foo, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)
    with self.run_execute(foo, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)
def __init__(self, name, sources=None, resources=None, exclusives=None):
  """A target with sources plus optional resources.

  resources may be a Resources target (or pointer to one), a sequence of them, or --
  legacy style -- raw file globs resolved against a parallel resources directory.
  """
  TargetWithSources.__init__(self, name, sources=sources, exclusives=exclusives)
  if resources is not None:
    def is_resources(item):
      # True when item resolves entirely to concrete Resources targets.
      if not isinstance(item, Target):
        return False
      concrete_targets = [t for t in item.resolve() if t.is_concrete]
      return all(isinstance(t, Resources) for t in concrete_targets)
    if is_resources(resources):
      self.resources = list(self.resolve_all(resources, Resources))
    elif isinstance(resources, Sequence) and all(map(is_resources, resources)):
      self.resources = list(self.resolve_all(resources, Resources))
    else:
      # Handle parallel resource dir globs.
      # For example, for a java_library target base of src/main/java:
      #   src/main/java/com/twitter/base/BUILD
      # We get:
      #   sibling_resources_base = src/main/resources
      #   base_relpath = com/twitter/base
      #   resources_dir = src/main/resources/com/twitter/base
      #
      # TODO(John Sirois): migrate projects to Resources and remove support for old style assumed
      # parallel resources dirs
      sibling_resources_base = os.path.join(os.path.dirname(self.target_base), 'resources')
      base_relpath = os.path.relpath(self.address.buildfile.relpath, self.target_base)
      resources_dir = os.path.join(sibling_resources_base, base_relpath)
      # Wrap the raw globs in a synthetic Resources target rooted at the parallel dir.
      with ParseContext.temp(basedir=resources_dir):
        self.resources = [Resources(name, resources)]
def test_validation(self):
  """Artifact requires string org/name/description and a Repository repo."""
  with ParseContext.temp():
    repo = Repository(name="myRepo", url="myUrl", push_db="myPushDb")
    # A fully valid artifact constructs without error.
    valid = dict(org="testOrg", name="testName", repo=repo, description="Test")
    Artifact(**valid)
    # Corrupting any single field should be rejected with ValueError.
    for field in ('org', 'name', 'repo', 'description'):
      kwargs = dict(valid)
      kwargs[field] = 1
      self.assertRaises(ValueError, Artifact, **kwargs)
def __init__(self, name, sources = None, resources = None, dependencies = None, module = "",
             module_root = "src/python"):
  """
    name = Name of library
    sources = Python source files
    resources = non-Python resources, e.g. templates, keys, other data (it is
      recommended that your application uses the pkgutil package to access these
      resources in a .zip-module friendly way.)
    dependencies = other PythonLibraries, Eggs or internal Pants targets
    module = everything beneath module_root is relative to this module name, None if root namespace
    module_root = see above
  """
  # NOTE(review): `context` is never read; locate() is presumably called for its side effect
  # of raising when no BUILD parse is active -- confirm before removing.
  context = ParseContext.locate()
  self._module = module
  PythonTarget.__init__( self, module_root, name, sources, resources, dependencies, False)
def test_sibling_references(self):
  """Parsing a BUILD file also resolves ':name' references into sibling BUILD.* files."""
  with temporary_dir() as root_dir:
    buildfile = create_buildfile(root_dir, 'a', name='BUILD', content=dedent('''
      dependencies(name='util',
        dependencies=[
          jar(org='com.twitter', name='util', rev='0.0.1')
        ]
      )
    ''').strip())
    sibling = create_buildfile(root_dir, 'a', name='BUILD.sibling', content=dedent('''
      dependencies(name='util-ex',
        dependencies=[
          pants(':util'),
          jar(org='com.twitter', name='util-ex', rev='0.0.1')
        ]
      )
    ''').strip())
    # Parsing the primary BUILD file should make both targets addressable.
    ParseContext(buildfile).parse()
    utilex = Target.get(
        Address.parse(root_dir, 'a:util-ex', is_relative=False))
    utilex_deps = set(utilex.resolve())
    util = Target.get(
        Address.parse(root_dir, 'a:util', is_relative=False))
    util_deps = set(util.resolve())
    # util's deps must be fully contained in util-ex's transitive closure.
    self.assertEquals(util_deps, util_deps.intersection(utilex_deps))
def test_binary_target_injected_into_minified_dependencies_with_provider(self):
  """When the binary's dependency itself provides a dist, recursive execute runs both."""
  with ParseContext.temp():
    bar = python_library(name='bar', provides=setup_py(
        name='bar',
        version='0.0.0',
    ).with_binaries(bar_binary=pants(':bar_bin')))
    bar_bin = python_binary(name='bar_bin',
                            entry_point='bar.bin.bar',
                            dependencies=[pants(':bar_bin_dep')])
    bar_bin_dep = python_library(name='bar_bin_dep', provides=setup_py(
        name='bar_bin_dep',
        version='0.0.0',
    ))
    assert SetupPy.minified_dependencies(bar) == OrderedSet(
        [bar_bin, bar_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(bar))
    assert entry_points == {'bar_binary': 'bar.bin.bar'}
    # Non-recursive: only bar itself is built.
    with self.run_execute(bar, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(bar)
    # Recursive: the providing dependency is built too.
    with self.run_execute(bar, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_has_calls(
          [call(bar), call(bar_bin_dep)], any_order=True)
def test_validation(self):
  """JarLibrary accepts a real dependency but rejects a None dependencies argument."""
  with ParseContext.temp('JarLibraryTest/test_validation'):
    dependency = Target(name='mybird')
    JarLibrary(name="test", dependencies=dependency)
    # dependencies=None must raise rather than silently build an empty library.
    self.assertRaises(TargetDefinitionException,
                      JarLibrary,
                      name="test1",
                      dependencies=None)
def test_python_binary_with_entry_point_and_source(self):
  """An explicit entry point consistent with the source file is kept verbatim."""
  with ParseContext.temp('src'):
    cases = [
      ('binary1', 'blork', 'blork.py'),
      ('binary2', 'blork:main', 'blork.py'),
      ('binary3', 'bin.blork:main', 'bin/blork.py'),
    ]
    for name, entry_point, source in cases:
      binary = PythonBinary(name=name, entry_point=entry_point, source=source)
      assert binary.entry_point == entry_point
def test_validation(self):
  """InternalTarget names must be strings; dependencies must be target-like or None."""
  with ParseContext.temp('InternalTargetTest/test_validation'):
    def expect_rejection(**kwargs):
      self.assertRaises(TargetDefinitionException, InternalTarget, **kwargs)

    InternalTarget(name="valid", dependencies=None)
    expect_rejection(name=1, dependencies=None)
    InternalTarget(name="valid2", dependencies=Target(name='mybird'))
    expect_rejection(name='valid3', dependencies=1)
def generate_test_targets():
  """Lazily create, once, the requirement targets every python test run needs."""
  if PythonTestBuilder.TESTING_TARGETS is None:
    with ParseContext.temp():
      requirements = [PythonRequirement('pytest')]
      # unittest2 backports are version-gated to the matching interpreter major.
      requirements.append(
          PythonRequirement('unittest2', version_filter=lambda: sys.version_info[0] == 2))
      requirements.append(
          PythonRequirement('unittest2py3k', version_filter=lambda: sys.version_info[0] == 3))
      PythonTestBuilder.TESTING_TARGETS = requirements
  return PythonTestBuilder.TESTING_TARGETS
def create_dependencies(depmap):
  """Materialize one python_library per depmap entry, wiring dependencies by name."""
  with ParseContext.temp():
    target_map = dict(
        (name, python_library(
            name=name,
            provides=setup_py(name=name, version='0.0.0'),
            dependencies=[pants(':%s' % dep) for dep in deps]))
        for name, deps in depmap.items())
  return target_map
def execute(self):
  """Build a PEX for self.target and either write it to dist/ (--pex) or run it.

  Returns the written-pex success code (0) or the exit code of the executed pex.
  """
  # Mutually exclusive option combinations fail fast.
  if self.options.pex and self.options.ipython:
    self.error('Cannot specify both --pex and --ipython!')
  if self.options.entry_point and self.options.ipython:
    self.error('Cannot specify both --entry_point and --ipython!')
  if self.options.verbose:
    print('Build operating on target: %s %s' %
          (self.target,
           'Extra targets: %s' % ' '.join(map(str, self.extra_targets))
           if self.extra_targets else ''))
  builder = PEXBuilder(tempfile.mkdtemp(), interpreter=self.interpreter,
                       pex_info=self.target.pexinfo if isinstance(self.target, PythonBinary)
                       else None)
  if self.options.entry_point:
    builder.set_entry_point(self.options.entry_point)
  if self.options.ipython:
    # IPython mode pulls its entry point and extra requirements from pants.ini.
    if not self.config.has_section('python-ipython'):
      self.error('No python-ipython sections defined in your pants.ini!')
    builder.info.entry_point = self.config.get('python-ipython', 'entry_point')
    if builder.info.entry_point is None:
      self.error('Must specify entry_point for IPython in the python-ipython section '
                 'of your pants.ini!')
    requirements = self.config.getlist('python-ipython', 'requirements', default=[])
    with ParseContext.temp():
      for requirement in requirements:
        self.extra_targets.append(PythonRequirement(requirement))
  executor = PythonChroot(
      self.target,
      self.root_dir,
      builder=builder,
      interpreter=self.interpreter,
      extra_targets=self.extra_targets,
      conn_timeout=self.options.conn_timeout)
  executor.dump()
  if self.options.pex:
    pex_name = os.path.join(self.root_dir, 'dist', '%s.pex' % self.target.name)
    builder.build(pex_name)
    print('Wrote %s' % pex_name)
    return 0
  else:
    builder.freeze()
    pex = PEX(builder.path(), interpreter=self.interpreter)
    po = pex.run(args=list(self.args), blocking=False)
    try:
      return po.wait()
    except KeyboardInterrupt:
      # Forward Ctrl-C to the child before propagating.
      po.send_signal(signal.SIGINT)
      raise
def create_dependencies(depmap):
  """Build a name -> python_library mapping mirroring the given dependency map."""
  with ParseContext.temp():
    def make_library(name, deps):
      return python_library(
          name=name,
          provides=setup_py(name=name, version='0.0.0'),
          dependencies=[pants(':%s' % dep) for dep in deps])

    target_map = {}
    for name, deps in depmap.items():
      target_map[name] = make_library(name, deps)
  return target_map
def test_python_binary_with_entry_point_and_source_mismatch(self):
  """A source that contradicts the declared entry point is rejected at construction."""
  with ParseContext.temp('src'):
    mismatches = [
      ('binary1', 'blork', 'hork.py'),
      ('binary2', 'blork:main', 'hork.py'),
      ('binary3', 'bin.blork', 'blork.py'),
      ('binary4', 'bin.blork', 'bin.py'),
    ]
    for name, entry_point, source in mismatches:
      with pytest.raises(TargetDefinitionException):
        PythonBinary(name=name, entry_point=entry_point, source=source)
def __init__(self, name, dependencies=None, num_sources=0, exclusives=None):
  """Test double wiring both InternalTarget and TargetWithSources bookkeeping."""
  with ParseContext.temp():
    InternalTarget.__init__(self, name, dependencies, exclusives=exclusives)
    TargetWithSources.__init__(self, name, exclusives=exclusives)
  self.num_sources = num_sources
  # Record each declared exclusive as a singleton value-set keyed by name.
  self.declared_exclusives = defaultdict(set)
  for key in (exclusives if exclusives is not None else {}):
    self.declared_exclusives[key] = set([exclusives[key]])
  self.exclusives = None
def generate_test_targets():
  """Create, once, the implicit test requirement targets via a fake parse context."""
  if PythonTestBuilder.TESTING_TARGETS is None:
    def build_targets():
      # unittest2 backports are gated to the matching interpreter major version.
      on_py2 = lambda: sys.version_info[0] == 2
      on_py3 = lambda: sys.version_info[0] == 3
      return [
        PythonRequirement('pytest'),
        PythonRequirement('unittest2', version_filter=on_py2),
        PythonRequirement('unittest2py3k', version_filter=on_py3),
      ]
    PythonTestBuilder.TESTING_TARGETS = ParseContext.fake(build_targets)
  return PythonTestBuilder.TESTING_TARGETS
def __init__(self, base=None, mapper=None, relative_to=None):
  """
  :param mapper: Function that takes a path string and returns a path string. Takes a path in
    the source tree, returns a path to use in the resulting bundle. By default, an identity
    mapper.
  :param string relative_to: Set up a simple mapping from source path to bundle path. E.g.,
    ``relative_to='common'`` removes that prefix from all files in the application bundle.
  """
  # mapper and relative_to are alternative ways to express the same mapping.
  if mapper and relative_to:
    raise ValueError("Must specify exactly one of 'mapper' or 'relative_to'")
  if relative_to:
    # Resolve relative_to against the current BUILD parse context when no base is given.
    base = base or ParseContext.path(relative_to)
    if not os.path.isdir(base):
      raise ValueError('Could not find a directory to bundle relative to at %s' % base)
    self.mapper = RelativeToMapper(base)
  else:
    self.mapper = mapper or RelativeToMapper(base or ParseContext.path())
  # Maps absolute source path -> path within the bundle.
  self.filemap = {}
def generate_test_targets():
  """Build the shared list of testing requirement targets on first use."""
  if PythonTestBuilder.TESTING_TARGETS is None:
    with ParseContext.temp():
      def py_major_is(major):
        # Closure binds `major` by value, avoiding late-binding surprises.
        return lambda: sys.version_info[0] == major
      PythonTestBuilder.TESTING_TARGETS = [
        PythonRequirement("pytest"),
        PythonRequirement("pytest-cov"),
        PythonRequirement("coverage"),
        PythonRequirement("unittest2", version_filter=py_major_is(2)),
        PythonRequirement("unittest2py3k", version_filter=py_major_is(3)),
      ]
  return PythonTestBuilder.TESTING_TARGETS
def generate_test_targets():
  """Memoize the requirement targets implicitly needed by python tests."""
  if PythonTestBuilder.TESTING_TARGETS is None:
    with ParseContext.temp():
      targets = []
      for requirement in ('pytest', 'pytest-cov', 'coverage'):
        targets.append(PythonRequirement(requirement))
      # Backport requirements only apply on the matching interpreter major.
      targets.append(PythonRequirement('unittest2',
                                       version_filter=lambda: sys.version_info[0] == 2))
      targets.append(PythonRequirement('unittest2py3k',
                                       version_filter=lambda: sys.version_info[0] == 3))
      PythonTestBuilder.TESTING_TARGETS = targets
  return PythonTestBuilder.TESTING_TARGETS
def generate_test_targets(cls):
  """Memoize the implicit test-support requirement targets on the class."""
  if cls.TESTING_TARGETS is None:
    with ParseContext.temp():
      def major_version_filter(prefix):
        # version_filter receives (python_version, platform); gate on the major prefix.
        return lambda py, pl: py.startswith(prefix)
      cls.TESTING_TARGETS = [
        PythonRequirement('pytest'),
        PythonRequirement('pytest-cov'),
        PythonRequirement('coverage==3.6b1'),
        PythonRequirement('unittest2', version_filter=major_version_filter('2')),
        PythonRequirement('unittest2py3k', version_filter=major_version_filter('3')),
      ]
  return cls.TESTING_TARGETS
def dump(self):
  """Dump the target closure (libraries, generated requirements, distributions) into
  the PEX builder and return it."""
  self.debug('Building PythonBinary %s:' % self._target)
  targets = self.resolve([self._target] + self._extra_targets)
  for lib in targets['libraries'] | targets['binaries']:
    self._dump_library(lib)
  generated_reqs = OrderedSet()
  if targets['thrifts']:
    # Thrift codegen results are memoized per thrift target across chroots.
    for thr in set(targets['thrifts']):
      if thr not in self.MEMOIZED_THRIFTS:
        self.MEMOIZED_THRIFTS[
            thr] = self._generate_thrift_requirement(thr)
      generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
    with ParseContext.temp():
      # trick pants into letting us add this python requirement, otherwise we get
      # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
      # PythonRequirement(thrift)
      #
      # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
      # should first detect if any explicit thrift requirements have been added and use
      # those. Only if they have not been supplied should we auto-inject it.
      generated_reqs.add(
          PythonRequirement(
              'thrift',
              use_2to3=True,
              name='thrift-' + ''.join(random.sample('0123456789abcdef' * 8, 8))))
  for antlr in targets['antlrs']:
    generated_reqs.add(self._generate_antlr_requirement(antlr))
  targets['reqs'] |= generated_reqs
  for req in targets['reqs']:
    # Requirements filtered out for this interpreter/platform are skipped entirely.
    if not req.should_build(self._interpreter.python, Platform.current()):
      self.debug('Skipping %s based upon version filter' % req)
      continue
    self._dump_requirement(req._requirement, False, req._repository)
  reqs_to_build = (
      req for req in targets['reqs']
      if req.should_build(self._interpreter.python, Platform.current()))
  for dist in self._resolver.resolve(reqs_to_build, interpreter=self._interpreter):
    self._dump_distribution(dist)
  if len(targets['binaries']) > 1:
    print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)
  return self._builder
def test_parse(self):
  """Parsing a BUILD file executes it; file-writing side effects land next to it."""
  with temporary_dir() as root_dir:
    buildfile = create_buildfile(
        root_dir, 'a',
        '''
with open('b', 'w') as b:
  b.write('jack spratt')
'''.strip())
    b_file = os.path.join(root_dir, 'a', 'b')
    # Nothing happens until the BUILD file is actually parsed.
    self.assertFalse(os.path.exists(b_file))
    ParseContext(buildfile).parse()
    with open(b_file, 'r') as b:
      self.assertEquals('jack spratt', b.read())
def _generate_requirement(self, library, builder_cls):
  """Build (or reuse a cached) sdist for `library` and wrap it as a PythonRequirement."""
  library_key = self._key_generator.key_for_target(library)
  builder = builder_cls(library, self._root, self._config, '-' + library_key.hash[:8])
  cache_dir = os.path.join(self._egg_cache_root, library_key.id)
  if self._build_invalidator.needs_update(library_key):
    # Cache miss or stale: rebuild the sdist and refresh the cache entry.
    sdist = builder.build(interpreter=self._interpreter)
    safe_mkdir(cache_dir)
    shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
    self._build_invalidator.update(library_key)
  # PythonRequirement construction requires an active parse context.
  with ParseContext.temp():
    return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)
def test_on_context_exit(self):
  """Exit actions require an active context and only run once the parse completes."""
  with temporary_dir() as root_dir:
    parse_context = ParseContext(create_buildfile(root_dir, 'a'))
    # Registering outside an active parse is an error.
    with pytest.raises(ContextError):
      parse_context.on_context_exit(lambda: 37)
  with temporary_dir() as root_dir:
    buildfile = create_buildfile(
        root_dir, 'a',
        '''import os
from twitter.pants.base import ParseContext
def leave_a_trail(file, contents=''):
  with open(file, 'w') as b:
    b.write(contents)
b_file = os.path.join(os.path.dirname(__file__), 'b')
ParseContext.locate().on_context_exit(leave_a_trail, b_file, contents='42')
assert not os.path.exists(b_file), 'Expected context exit action to be delayed.'
'''.strip())
    b_file = os.path.join(root_dir, 'a', 'b')
    self.assertFalse(os.path.exists(b_file))
    # Parsing runs the BUILD file, then fires the delayed exit action.
    ParseContext(buildfile).parse()
    with open(b_file, 'r') as b:
      self.assertEquals('42', b.read())
def test_binary_cycle(self):
  """A provided binary that depends back on its provider is flagged as a cycle."""
  with ParseContext.temp():
    foo = python_library(
        name='foo',
        provides=setup_py(
            name='foo',
            version='0.0.0',
        ).with_binaries(foo_binary=pants(':foo_bin')))
    foo_bin = python_binary(
        name='foo_bin',
        entry_point='foo.bin.foo',
        dependencies=[pants(':foo')])
    # Walking minified deps should detect foo -> foo_bin -> foo.
    with pytest.raises(TargetDefinitionException):
      SetupPy.minified_dependencies(foo)
def compiled_idl(cls, idl_dep, generated_deps=None, compiler=None, language=None,
                 namespace_map=None):
  """Marks a jar as containing IDL files that should be fetched and processed locally.

  idl_dep: A dependency resolvable to a single jar library.
  generated_deps: Dependencies for the code that will be generated from "idl_dep"
  compiler: The thrift compiler to apply to the fetched thrift IDL files.
  language: The language to generate code for - supported by some compilers
  namespace_map: A mapping from IDL declared namespaces to custom namespaces - supported by some
      compilers.
  """
  deps = [t for t in idl_dep.resolve() if t.is_concrete]
  if not len(deps) == 1:
    raise TaskError('Can only arrange for compiled idl for a single dependency at a time, '
                    'given:\n\t%s' % '\n\t'.join(map(str, deps)))
  jar = deps.pop()
  if not isinstance(jar, JarDependency):
    raise TaskError('Can only arrange for compiled idl from a jar dependency, given: %s' % jar)
  # Memoization key: same jar/compiler/language (and namespace mapping) reuses the placeholder.
  request = (jar, compiler, language)
  namespace_signature = None
  if namespace_map:
    sha = hashlib.sha1()
    # Sorted so the digest is stable regardless of dict iteration order.
    # NOTE(review): sha.update() requires bytes on Python 3 -- assumes str values here; confirm.
    for ns_from, ns_to in sorted(namespace_map.items()):
      sha.update(ns_from)
      sha.update(ns_to)
    namespace_signature = sha.hexdigest()
    request += (namespace_signature,)
  if request not in cls._PLACEHOLDER_BY_REQUEST:
    if not cls._EXTRACT_BASE:
      # Lazily create and register the extraction workdir as a thrift source root.
      config = Config.load()
      cls._EXTRACT_BASE = config.get('idl-extract', 'workdir')
      safe_mkdir(cls._EXTRACT_BASE)
      SourceRoot.register(cls._EXTRACT_BASE, JavaThriftLibrary)
    with ParseContext.temp(cls._EXTRACT_BASE):
      # TODO(John Sirois): abstract ivy specific configurations notion away
      jar._configurations.append('idl')
      jar.with_artifact(configuration='idl', classifier='idl')
      target_name = '-'.join(filter(None, (jar.id, compiler, language, namespace_signature)))
      placeholder = JavaThriftLibrary(target_name,
                                      sources=None,
                                      dependencies=[jar] + (generated_deps or []),
                                      compiler=compiler,
                                      language=language,
                                      namespace_map=namespace_map)
      cls._PLACEHOLDER_BY_REQUEST[request] = placeholder
      cls._PLACEHOLDERS_BY_JAR[jar].append(placeholder)
  return cls._PLACEHOLDER_BY_REQUEST[request]
def __init__(self, base=None, mapper=None, relative_to=None):
  """
  :param mapper: Function that takes a path string and returns a path string. Takes a path in
    the source tree, returns a path to use in the resulting bundle. By default, an identity
    mapper.
  :param string relative_to: Set up a simple mapping from source path to bundle path. E.g.,
    ``relative_to='common'`` removes that prefix from all files in the application bundle.
  """
  # mapper and relative_to are alternative ways to express the same mapping.
  if mapper and relative_to:
    raise ValueError(
        "Must specify exactly one of 'mapper' or 'relative_to'")
  if relative_to:
    # Resolve relative_to against the current BUILD parse context when no base is given.
    base = base or ParseContext.path(relative_to)
    if not os.path.isdir(base):
      raise ValueError(
          'Could not find a directory to bundle relative to at %s' % base)
    self.mapper = RelativeToMapper(base)
  else:
    self.mapper = mapper or RelativeToMapper(base or ParseContext.path())
  # Maps absolute source path -> path within the bundle.
  self.filemap = {}
def __init__(self, spec):
  """A lazy pointer to another target, resolved relative to the current BUILD parse context."""
  # it's critical the spec is parsed 1st, the results are needed elsewhere in constructor flow
  parse_context = ParseContext.locate()

  def parse_address():
    if spec.startswith(':'):
      # the :[target] could be in a sibling BUILD - so parse using the canonical address
      pathish = "%s:%s" % (parse_context.buildfile.canonical_relpath, spec[1:])
      return Address.parse(parse_context.buildfile.root_dir, pathish, False)
    else:
      return Address.parse(parse_context.buildfile.root_dir, spec, False)

  self.address = parse_address()
  Target.__init__(self, self.address.target_name, False)
def generate_test_targets(cls):
  """Lazily populate cls.TESTING_TARGETS with the implicit testing requirements."""
  if cls.TESTING_TARGETS is not None:
    return cls.TESTING_TARGETS
  with ParseContext.temp():
    cls.TESTING_TARGETS = [
      PythonRequirement('pytest'),
      PythonRequirement('pytest-cov'),
      PythonRequirement('coverage==3.6b1'),
      # version_filter receives (python_version, platform); gate on the major version.
      PythonRequirement('unittest2',
                        version_filter=lambda py, pl: py.startswith('2')),
      PythonRequirement('unittest2py3k',
                        version_filter=lambda py, pl: py.startswith('3')),
    ]
  return cls.TESTING_TARGETS
def test_on_context_exit(self):
  """Exit actions require an active context and only run once the parse completes."""
  with temporary_dir() as root_dir:
    parse_context = ParseContext(create_buildfile(root_dir, 'a'))
    # Registering outside an active parse is an error.
    with pytest.raises(ContextError):
      parse_context.on_context_exit(lambda: 37)
  with temporary_dir() as root_dir:
    buildfile = create_buildfile(root_dir, 'a', content=dedent('''
      import os
      from twitter.pants.base import ParseContext

      def leave_a_trail(file, contents=''):
        with open(file, 'w') as b:
          b.write(contents)

      b_file = os.path.join(os.path.dirname(__file__), 'b')
      ParseContext.locate().on_context_exit(leave_a_trail, b_file, contents='42')
      assert not os.path.exists(b_file), 'Expected context exit action to be delayed.'
    ''').strip())
    b_file = os.path.join(root_dir, 'a', 'b')
    self.assertFalse(os.path.exists(b_file))
    # Parsing runs the BUILD file, then fires the delayed exit action.
    ParseContext(buildfile).parse()
    with open(b_file, 'r') as b:
      self.assertEquals('42', b.read())
def __init__(self, spec):
  """A lazy pointer to another target, resolved relative to the current BUILD parse context."""
  # it's critical the spec is parsed 1st, the results are needed elsewhere in constructor flow
  parse_context = ParseContext.locate()

  def parse_address():
    if spec.startswith(':'):
      # the :[target] could be in a sibling BUILD - so parse using the canonical address
      pathish = "%s:%s" % (parse_context.buildfile.canonical_relpath, spec[1:])
      return Address.parse(parse_context.buildfile.root_dir, pathish, False)
    else:
      return Address.parse(parse_context.buildfile.root_dir, spec, False)

  self.address = parse_address()
  # We must disable the re-init check, because our funky __getattr__ breaks it.
  # We're not involved in any multiple inheritance, so it's OK to disable it here.
  Target.__init__(self, self.address.target_name, False, reinit_check=False)
def dump(self):
  """Dump the target closure (libraries, generated requirements, distributions) into
  the PEX builder and return it."""
  self.debug('Building PythonBinary %s:' % self._target)
  targets = self.resolve([self._target] + self._extra_targets)
  for lib in targets['libraries'] | targets['binaries']:
    self._dump_library(lib)
  generated_reqs = OrderedSet()
  if targets['thrifts']:
    # Thrift codegen results are memoized per thrift target across chroots.
    for thr in set(targets['thrifts']):
      if thr not in self.MEMOIZED_THRIFTS:
        self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
      generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
    with ParseContext.temp():
      # trick pants into letting us add this python requirement, otherwise we get
      # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
      # PythonRequirement(thrift)
      #
      # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
      # should first detect if any explicit thrift requirements have been added and use
      # those. Only if they have not been supplied should we auto-inject it.
      generated_reqs.add(PythonRequirement('thrift', use_2to3=True,
          name='thrift-' + ''.join(random.sample('0123456789abcdef' * 8, 8))))
  for antlr in targets['antlrs']:
    generated_reqs.add(self._generate_antlr_requirement(antlr))
  targets['reqs'] |= generated_reqs
  for req in targets['reqs']:
    # Requirements filtered out for this interpreter/platform are skipped entirely.
    if not req.should_build(self._interpreter.python, Platform.current()):
      self.debug('Skipping %s based upon version filter' % req)
      continue
    self._dump_requirement(req._requirement, False, req._repository)
  reqs_to_build = (req for req in targets['reqs']
                   if req.should_build(self._interpreter.python, Platform.current()))
  for dist in self._resolver.resolve(reqs_to_build, interpreter=self._interpreter):
    self._dump_distribution(dist)
  if len(targets['binaries']) > 1:
    print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)
  return self._builder
def test_binary_cycle(self):
  """A provided binary that depends back on its provider is flagged as a cycle."""
  with ParseContext.temp():
    foo = python_library(
      name = 'foo',
      provides = setup_py(
        name = 'foo',
        version = '0.0.0',
      ).with_binaries(foo_binary = pants(':foo_bin')))
    # The binary points straight back at its providing library.
    foo_bin = python_binary(
      name = 'foo_bin',
      entry_point = 'foo.bin.foo',
      dependencies = [ pants(':foo') ])
    with pytest.raises(TargetDefinitionException):
      SetupPy.minified_dependencies(foo)
def __init__(self, spec):
  """A lazy pointer to another target, resolved relative to the current BUILD parse context."""
  # it's critical the spec is parsed 1st, the results are needed elsewhere in constructor flow
  parse_context = ParseContext.locate()

  def parse_address():
    if spec.startswith(':'):
      # the :[target] could be in a sibling BUILD - so parse using the canonical address
      pathish = "%s:%s" % (parse_context.buildfile.canonical_relpath, spec[1:])
      return Address.parse(parse_context.buildfile.root_dir, pathish, False)
    else:
      return Address.parse(parse_context.buildfile.root_dir, spec, False)

  self.address = parse_address()
  # We must disable the re-init check, because our funky __getattr__ breaks it.
  # We're not involved in any multiple inheritance, so it's OK to disable it here.
  Target.__init__(self, self.address.target_name, reinit_check=False)
def test_jar_dependency(self):
  """Overrides replace a transitively-pulled jar rev and exclude the overridden one."""
  with ParseContext.temp():
    org, name = "org", "name"
    # thing to override
    nay = JarDependency(org, name, "0.0.1")
    yea = JarDependency(org, name, "0.0.8")
    # define targets depend on different 'org:c's
    JarLibrary("c", [nay])
    JarLibrary("b", [yea])
    # then depend on those targets transitively, and override to the correct version
    l = JarLibrary(
      "a",
      dependencies=[Pants(":c")],
      overrides=[":b"])
    # confirm that resolving includes the correct version
    resolved = set(l.resolve())
    self.assertTrue(yea in resolved)
    # and attaches an exclude directly to the JarDependency
    self.assertTrue(Exclude(org, name) in nay.excludes)
def parse_spec(self, error, spec):
  """Resolve one target spec into self.targets, reporting failures through `error`.

  Supports 'path::' (recursive), 'path:' (whole BUILD file) and explicit address specs.
  """
  if spec.endswith('::'):
    self.add_target_recursive(spec[:-len('::')])
  elif spec.endswith(':'):
    self.add_target_directory(spec[:-len(':')])
  else:
    try:
      address = Address.parse(get_buildroot(), spec)
      ParseContext(address.buildfile).parse()
      target = Target.get(address)
      if target:
        self.targets.append(target)
      else:
        # Unknown target name: suggest the targets defined in the same BUILD file.
        siblings = Target.get_all_addresses(address.buildfile)
        prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
        error('%s => %s?:\n %s' % (address, prompt, '\n '.join(str(a) for a in siblings)))
    except (TypeError, ImportError, TaskError, GoalError):
      # Likely a bug in the BUILD file or in pants itself - show the traceback.
      error(spec, include_traceback=True)
    except (IOError, SyntaxError):
      error(spec)
def __init__(self, name, sources=None, resources=None, dependencies=None, module="",
             module_root="src/python"):
  """
    name = Name of library
    sources = Python source files
    resources = non-Python resources, e.g. templates, keys, other data (it is
      recommended that your application uses the pkgutil package to access these
      resources in a .zip-module friendly way.)
    dependencies = other PythonLibraries, Eggs or internal Pants targets
    module = everything beneath module_root is relative to this module name, None if root namespace
    module_root = see above
  """
  # NOTE(review): `context` is never read; locate() is presumably called for its side effect
  # of raising when no BUILD parse is active -- confirm before removing.
  context = ParseContext.locate()
  self._module = module
  PythonTarget.__init__(self, module_root, name, sources, resources, dependencies, False)
def test_binary_target_injected_into_minified_dependencies_with_provider(self):
  """When the binary's dependency itself provides a dist, recursive execute runs both."""
  with ParseContext.temp():
    bar = python_library(
      name = 'bar',
      provides = setup_py(
        name = 'bar',
        version = '0.0.0',
      ).with_binaries(
        bar_binary = pants(':bar_bin')
      )
    )
    bar_bin = python_binary(
      name = 'bar_bin',
      entry_point = 'bar.bin.bar',
      dependencies = [ pants(':bar_bin_dep') ]
    )
    bar_bin_dep = python_library(
      name = 'bar_bin_dep',
      provides = setup_py(
        name = 'bar_bin_dep',
        version = '0.0.0',
      )
    )
    assert SetupPy.minified_dependencies(bar) == OrderedSet([bar_bin, bar_bin_dep])
    entry_points = dict(SetupPy.iter_entry_points(bar))
    assert entry_points == {'bar_binary': 'bar.bin.bar'}
    # Non-recursive: only bar itself is built.
    with self.run_execute(bar, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(bar)
    # Recursive: the providing dependency is built too.
    with self.run_execute(bar, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_has_calls([
        call(bar),
        call(bar_bin_dep)
      ], any_order=True)
def __init__(self, name, sources=None, resources=None, exclusives=None):
  """
  name = Name of this target
  sources = Source files for this target
  resources = A Resources target, a sequence of them, or (legacy) a sequence of
    file paths resolved against the parallel 'resources' source root
  exclusives = Exclusives tags passed through to TargetWithSources
  """
  TargetWithSources.__init__(self, name, sources=sources, exclusives=exclusives)
  if resources is not None:
    # True when item is a Target whose concrete resolution consists solely of
    # Resources targets.
    def is_resources(item):
      return (isinstance(item, Target) and all(
        map(lambda tgt: isinstance(tgt, Resources),
            filter(lambda tgt: is_concrete(tgt), item.resolve()))))

    if is_resources(resources):
      # A single Resources-like target.
      self.resources = list(self.resolve_all(resources, Resources))
    elif isinstance(resources, Sequence) and all(map(is_resources, resources)):
      # A sequence of Resources-like targets.
      self.resources = list(self.resolve_all(resources, Resources))
    else:
      # Handle parallel resource dir globs.
      # For example, for a java_library target base of src/main/java:
      #   src/main/java/com/twitter/base/BUILD
      # We get:
      #   sibling_resources_base = src/main/resources
      #   base_relpath = com/twitter/base
      #   resources_dir = src/main/resources/com/twitter/base
      #
      # TODO(John Sirois): migrate projects to Resources and remove support for old
      # style assumed parallel resources dirs
      sibling_resources_base = os.path.join(os.path.dirname(self.target_base), 'resources')
      base_relpath = os.path.relpath(self.address.buildfile.relpath, self.target_base)
      resources_dir = os.path.join(sibling_resources_base, base_relpath)
      # Synthesize a Resources target rooted at the parallel resources dir.
      with ParseContext.temp(basedir=resources_dir):
        self.resources = [Resources(name, resources)]
def test_binary_target_injected_into_minified_dependencies(self):
  """A provided binary and its dependency land in the minified deps, but only the
  providing library itself is built by setup_py (recursively or not)."""
  with ParseContext.temp():
    foo_provides = setup_py(name='foo', version='0.0.0').with_binaries(
      foo_binary=pants(':foo_bin'))
    foo = python_library(name='foo', provides=foo_provides)
    foo_bin = python_binary(
      name='foo_bin',
      entry_point='foo.bin.foo',
      dependencies=[pants(':foo_bin_dep')])
    foo_bin_dep = python_library(name='foo_bin_dep')

    expected = OrderedSet([foo_bin, foo_bin_dep])
    assert SetupPy.minified_dependencies(foo) == expected
    assert dict(SetupPy.iter_entry_points(foo)) == {'foo_binary': 'foo.bin.foo'}

    with self.run_execute(foo, recursive=False) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)

    # foo_bin_dep has no provides, so even a recursive run only builds foo.
    with self.run_execute(foo, recursive=True) as setup_py_command:
      setup_py_command.run_one.assert_called_with(foo)
def setup_parser(self, parser, args):
  """Configures the option parser for the goal command and pre-parses goals/specs.

  parser: the option parser to configure
  args: the raw (mutable) argument list; may be rewritten in place by pantsrc
    expansion below
  """
  self.config = Config.load()
  Goal.add_global_options(parser)

  # We support attempting zero or more goals. Multiple goals must be delimited from further
  # options and non goal args with a '--'. The key permutations we need to support:
  # ./pants goal => goals
  # ./pants goal goals => goals
  # ./pants goal compile src/java/... => compile
  # ./pants goal compile -x src/java/... => compile
  # ./pants goal compile src/java/... -x => compile
  # ./pants goal compile run -- src/java/... => compile, run
  # ./pants goal compile run -- src/java/... -x => compile, run
  # ./pants goal compile run -- -x src/java/... => compile, run

  # With no arguments, default to listing the installed goals.
  if not args:
    args.append('goals')

  if len(args) == 1 and args[0] in set(['-h', '--help', 'help']):
    # Renders a two-column usage table, left column padded to the widest entry.
    def format_usage(usages):
      left_colwidth = 0
      for left, right in usages:
        left_colwidth = max(left_colwidth, len(left))
      lines = []
      for left, right in usages:
        lines.append(' %s%s%s' % (left, ' ' * (left_colwidth - len(left) + 1), right))
      return '\n'.join(lines)

    usages = [
      ("%prog goal goals ([spec]...)", Phase('goals').description),
      ("%prog goal help [goal] ([spec]...)", Phase('help').description),
      ("%prog goal [goal] [spec]...", "Attempt goal against one or more targets."),
      ("%prog goal [goal] ([goal]...) -- [spec]...", "Attempts all the specified goals."),
    ]
    parser.set_usage("\n%s" % format_usage(usages))
    parser.epilog = ("Either lists all installed goals, provides extra help for a goal or else "
                     "attempts to achieve the specified goal for the listed targets." """
 Note that target specs accept two special forms:
 [dir]: to include all targets in the specified directory
 [dir]:: to include all targets found in all BUILD files recursively under the directory""")
    parser.print_help()
    sys.exit(0)
  else:
    goals, specs = Goal.parse_args(args)

    # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
    ParseContext.enable_pantsnew()

    # Bootstrap goals by loading any configured bootstrap BUILD files
    with self.check_errors('The following bootstrap_buildfiles cannot be loaded:') as error:
      for path in self.config.getlist('goals', 'bootstrap_buildfiles', default=[]):
        try:
          buildfile = BuildFile(get_buildroot(), os.path.relpath(path, get_buildroot()))
          ParseContext(buildfile).parse()
        except (TypeError, ImportError, TaskError, GoalError):
          error(path, include_traceback=True)
        except (IOError, SyntaxError):
          error(path)

    # Bootstrap user goals by loading any BUILD files implied by targets
    with self.check_errors('The following targets could not be loaded:') as error:
      for spec in specs:
        self.parse_spec(error, spec)

    self.phases = [Phase(goal) for goal in goals]

    rcfiles = self.config.getdefault('rcfiles', type=list, default=[])
    if rcfiles:
      rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

      # Break down the goals specified on the command line to the full set that will be run so we
      # can apply default flags to inner goal nodes. Also break down goals by Task subclass and
      # register the task class hierarchy fully qualified names so we can apply defaults to
      # baseclasses.
      all_goals = Phase.execution_order(Phase(goal) for goal in goals)
      sections = OrderedSet()
      for goal in all_goals:
        sections.add(goal.name)
        for clazz in goal.task_type.mro():
          if clazz == Task:
            break
          sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

      # Rewrite args in place so downstream parsing sees the rc-file defaults.
      augmented_args = rcfile.apply_defaults(sections, args)
      if augmented_args != args:
        del args[:]
        args.extend(augmented_args)
        print("(using pantsrc expansion: pants goal %s)" % ' '.join(augmented_args))

    Phase.setup_parser(parser, args, self.phases)
def test_in_a():
  # Closure over a_context/b_context from the enclosing test; intended to run via
  # a_context.do_in_context(...). Verifies locate() resolves to a_context while
  # inside it, then nests into b_context and returns what locate() yields there.
  self.assertEquals(a_context, ParseContext.locate())
  return b_context.do_in_context(lambda: ParseContext.locate())
def of(cls, target):
  """Wraps the given target in a new anonymous-context instance of this class that
  depends solely on it."""
  with ParseContext.temp():
    wrapper = cls(target.name, dependencies=[target])
  return wrapper
def __init__(self, run_tracker, root_dir, parser, argv):
  """Parses the command line into a single runnable binary target plus any extra
  targets and pass-through build args.

  run_tracker: the RunTracker for this run
  root_dir: the build root directory
  parser: the option parser (options already applied via Command.__init__)
  argv: the raw command line arguments
  """
  Command.__init__(self, run_tracker, root_dir, parser, argv)

  self.target = None
  self.extra_targets = []
  self.config = Config.load()

  # Select exactly one matching python interpreter; any other count is fatal.
  self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
  self.interpreter_cache.setup()
  interpreters = self.interpreter_cache.select_interpreter(
    list(self.interpreter_cache.matches(
      [self.options.interpreter] if self.options.interpreter else [''])))
  if len(interpreters) != 1:
    self.error('Unable to detect suitable interpreter.')
  self.interpreter = interpreters[0]

  # Synthesize requirement targets for any ad-hoc requirements from the command line.
  for req in self.options.extra_requirements:
    with ParseContext.temp():
      self.extra_targets.append(PythonRequirement(req, use_2to3=True))

  # We parse each arg in the context of the cli usage:
  # ./pants command (options) [spec] (build args)
  # ./pants command (options) [spec]... -- (build args)
  # Our command token and our options are parsed out so we see args of the form:
  # [spec] (build args)
  # [spec]... -- (build args)
  binaries = []
  for _ in range(len(self.args)):
    arg = self.args.pop(0)
    # Explicit '--' terminates the target spec list.
    if arg == '--':
      break

    def not_a_target(debug_msg):
      # BUG FIX: previously formatted `e` here instead of the debug_msg parameter,
      # raising NameError (or logging a stale exception) on the unrecognized-target
      # path where no exception was caught.
      self.debug('Not a target, assuming option: %s.' % debug_msg)
      # We failed to parse the arg as a target or else it was in valid address format but did not
      # correspond to a real target. Assume this is the 1st of the build args and terminate
      # processing args for target addresses.
      self.args.insert(0, arg)

    target = None
    try:
      address = Address.parse(root_dir, arg)
      target = Target.get(address)
      if target is None:
        not_a_target(debug_msg='Unrecognized target')
        break
    except Exception as e:
      not_a_target(debug_msg=e)
      break

    # Partition the resolved concrete targets: runnable binaries vs. extra chroot deps.
    for resolved in filter(lambda t: t.is_concrete, target.resolve()):
      if isinstance(resolved, PythonBinary):
        binaries.append(resolved)
      else:
        self.extra_targets.append(resolved)

  if len(binaries) == 0:
    # treat as a chroot
    pass
  elif len(binaries) == 1:
    # We found a binary and are done, the rest of the args get passed to it
    self.target = binaries[0]
  else:
    self.error('Can only process 1 binary target, %s contains %d:\n\t%s' % (
      arg, len(binaries), '\n\t'.join(str(binary.address) for binary in binaries)
    ))

  if self.target is None:
    if not self.extra_targets:
      self.error('No valid target specified!')
    # Fall back to treating the first extra target as the chroot target.
    self.target = self.extra_targets.pop(0)