def test_derivation(self):
    """Exercise every derivation query over a small derivation tree.

    Builds a -> {b, c} and b -> {d} (child was derived from parent) and checks
    get_derived_from, get_concrete_derived_from, get_direct_derivatives and
    get_all_derivatives for each node.
    """
    a = self.make_target('a')
    a_addr = a.address

    def inject_derived(spec, parent):
        # Synthetic targets record their derivation parent at injection time.
        addr = Address.parse(spec)
        self.build_graph.inject_synthetic_target(addr, Target, derived_from=parent)
        return addr, self.build_graph.get_target(addr)

    b_addr, b = inject_derived('b', a)
    c_addr, c = inject_derived('c', a)
    d_addr, d = inject_derived('d', b)

    # Immediate derivation parent (a target with no parent is its own).
    for addr, parent in ((a_addr, a), (b_addr, a), (c_addr, a), (d_addr, b)):
        self.assertEqual(parent, self.build_graph.get_derived_from(addr))

    # The concrete root of every derivation chain here is `a`.
    for addr in (a_addr, b_addr, c_addr, d_addr):
        self.assertEqual(a, self.build_graph.get_concrete_derived_from(addr))

    # Direct (one-hop) derivatives.
    for addr, children in ((a_addr, [b, c]), (b_addr, [d]), (c_addr, []), (d_addr, [])):
        self.assertEqual(children, self.build_graph.get_direct_derivatives(addr))

    # Transitive derivatives.
    for addr, derived in ((a_addr, [b, c, d]), (b_addr, [d]), (c_addr, []), (d_addr, [])):
        self.assertEqual(derived, self.build_graph.get_all_derivatives(addr))
def setUp(self):
    """Stand up the graph/scheduler over the canned scheduler_inputs examples."""
    examples_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
    self.graph, self.scheduler = setup_json_scheduler(examples_root)

    def resolve(spec):
        return self.graph.resolve(Address.parse(spec))

    # Targets used throughout the test methods.
    self.guava = resolve('3rdparty/jvm:guava')
    self.thrift = resolve('src/thrift/codegen/simple')
    self.java = resolve('src/java/codegen/simple')
def make_target(self, spec="", target_type=Target, dependencies=None, derived_from=None, **kwargs):
    """Creates a target and injects it into the test's build graph.

    :param string spec: The target address spec that locates this target.
    :param type target_type: The concrete target subclass to create this new target from.
    :param list dependencies: A list of target instances this new target depends on.
    :param derived_from: The target this new target was derived from.
    :type derived_from: :class:`pants.build_graph.target.Target`
    :returns: The newly constructed, graph-injected target.
    """
    address = Address.parse(spec)
    target = target_type(name=address.target_name, address=address, build_graph=self.build_graph, **kwargs)
    # Normalize the default without mutating a shared list.
    dependencies = dependencies or []
    self.build_graph.inject_target(
        target,
        dependencies=[dep.address for dep in dependencies],
        derived_from=derived_from
    )
    # TODO(John Sirois): This re-creates a little bit too much work done by the BuildGraph.
    # Fixup the BuildGraph to deal with non BuildFileAddresses better and just leverage it.
    for traversable_dependency_spec in target.traversable_dependency_specs:
        # Traversable specs are resolved relative to this target's directory.
        traversable_dependency_address = Address.parse(traversable_dependency_spec, relative_to=address.spec_path)
        traversable_dependency_target = self.build_graph.get_target(traversable_dependency_address)
        if not traversable_dependency_target:
            raise ValueError(
                "Tests must make targets for traversable dependency specs ahead of them "
                "being traversed, {} tried to traverse {} which does not exist.".format(
                    target, traversable_dependency_address
                )
            )
        if traversable_dependency_target not in target.dependencies:
            # A new edge invalidates the cached transitive hash.
            self.build_graph.inject_dependency(dependent=target.address, dependency=traversable_dependency_address)
            target.mark_transitive_invalidation_hash_dirty()
    return target
def make_target(self, spec='', target_type=Target, dependencies=None, derived_from=None,
                synthetic=False, make_missing_sources=True, **kwargs):
    """Creates a target and injects it into the test's build graph.

    :API: public

    :param string spec: The target address spec that locates this target.
    :param type target_type: The concrete target subclass to create this new target from.
    :param list dependencies: A list of target instances this new target depends on.
    :param derived_from: The target this new target was derived from.
    :type derived_from: :class:`pants.build_graph.target.Target`
    :param bool synthetic: Whether to inject the target as synthetic into the build graph.
    :param bool make_missing_sources: If True, create empty files on disk for any literal
      (non-glob) entries in ``kwargs['sources']`` before resolving them.
    :returns: The newly constructed, graph-injected target.
    """
    self._init_target_subsystem()

    address = Address.parse(spec)

    if make_missing_sources and 'sources' in kwargs:
        # Materialize literal source entries as empty files; glob patterns ('*')
        # are left for sources_for() to resolve.
        for source in kwargs['sources']:
            if '*' not in source:
                self.create_file(os.path.join(address.spec_path, source), mode='a', contents='')
        kwargs['sources'] = self.sources_for(kwargs['sources'], address.spec_path)

    target = target_type(name=address.target_name, address=address, build_graph=self.build_graph, **kwargs)
    dependencies = dependencies or []

    # Injectables must be applied before the target enters the graph.
    self.build_graph.apply_injectables([target])
    self.build_graph.inject_target(target,
                                   dependencies=[dep.address for dep in dependencies],
                                   derived_from=derived_from,
                                   synthetic=synthetic)

    # TODO(John Sirois): This re-creates a little bit too much work done by the BuildGraph.
    # Fixup the BuildGraph to deal with non BuildFileAddresses better and just leverage it.
    traversables = [target.compute_dependency_specs(payload=target.payload)]
    for dependency_spec in itertools.chain(*traversables):
        # Dependency specs are relative to this target's directory.
        dependency_address = Address.parse(dependency_spec, relative_to=address.spec_path)
        dependency_target = self.build_graph.get_target(dependency_address)
        if not dependency_target:
            raise ValueError('Tests must make targets for dependency specs ahead of them '
                             'being traversed, {} tried to traverse {} which does not exist.'
                             .format(target, dependency_address))
        if dependency_target not in target.dependencies:
            # A new edge invalidates the cached transitive hash.
            self.build_graph.inject_dependency(dependent=target.address, dependency=dependency_address)
            target.mark_transitive_invalidation_hash_dirty()
    return target
def setUp(self):
    """Stand up the graph/scheduler and resolve the example targets the tests use."""
    examples_root = os.path.join(os.path.dirname(__file__), "examples", "scheduler_inputs")
    self.graph, self.scheduler = setup_json_scheduler(examples_root)

    # (attribute name, target spec) pairs resolved up front for the tests.
    for attr, spec in (
        ("guava", "3rdparty/jvm:guava"),
        ("thrift", "src/thrift/codegen/simple"),
        ("java", "src/java/codegen/simple"),
        ("java_multi", "src/java/multiple_classpath_entries"),
        ("unconfigured_thrift", "src/thrift/codegen/unconfigured"),
    ):
        setattr(self, attr, self.graph.resolve(Address.parse(spec)))
def test_target_invalid(self):
    """Closure injection for non-existent addresses raises AddressLookupError."""
    # A BUILD file exists in 'a', but there is no target named 'nope' in it.
    self.add_to_build_file('a/BUILD', 'target(name="a")')
    with self.assertRaises(AddressLookupError):
        self.build_graph.inject_address_closure(Address.parse('a:nope'))

    # 'b' only defines a target named 'a', so neither the default-name spec
    # 'b' nor the explicit 'b:b' resolves.
    self.add_to_build_file('b/BUILD', 'target(name="a")')
    for bad_spec in ('b', 'b:b'):
        with self.assertRaises(AddressLookupError):
            self.build_graph.inject_address_closure(Address.parse(bad_spec))
def test_create_single(self):
    """A single AddressMap at the build root yields a root-namespace family."""
    things = {'one': Thing(name='one', age=42),
              'two': Thing(name='two', age=37)}
    address_family = AddressFamily.create('', [AddressMap('0', things)])

    self.assertEqual('', address_family.namespace)
    # Root-relative addresses use the '//:' prefix.
    expected = {Address.parse('//:{}'.format(name)): thing
                for name, thing in things.items()}
    self.assertEqual(expected, address_family.addressables)
def test_invalidation_relative(self):
    """Invalidating by a buildroot-relative path flushes the cached resolve."""
    resolved = self.address_mapper.resolve(Address.parse('a/b'))
    self.assertEqual(self.a_b_target, resolved)

    # Deleting the BUILD file alone is not observed: the cached object survives.
    os.unlink(os.path.join(self.build_root, 'a/b/b.BUILD.json'))
    self.assertIs(resolved, self.address_mapper.resolve(Address.parse('a/b')))

    # Explicit invalidation drops the cache entry, so resolution now fails.
    self.address_mapper.invalidate_build_file('a/b/b.BUILD.json')
    with self.assertRaises(ResolveError):
        self.address_mapper.resolve(Address.parse('a/b'))
def test_create_multiple(self):
    """Several AddressMaps under one namespace merge into a single family."""
    maps = [AddressMap('name/space/0', {'one': Thing(name='one', age=42)}),
            AddressMap('name/space/1', {'two': Thing(name='two', age=37)})]
    address_family = AddressFamily.create('name/space', maps)

    self.assertEqual('name/space', address_family.namespace)
    expected = {Address.parse('name/space:one'): Thing(name='one', age=42),
                Address.parse('name/space:two'): Thing(name='two', age=37)}
    self.assertEqual(expected, address_family.addressables)
def inject_address_closure(self, address):
    """Resolve `address` and recursively inject it and its full dependency closure.

    Raises TransitiveLookupError (wrapping the underlying AddressLookupError) when
    any address in the closure fails to resolve, and DuplicateAddressError when a
    target lists the same dependency address more than once.
    """
    if self.contains_address(address):
        # The address was either mapped in or synthetically injected already.
        return
    if address in self._addresses_already_closed:
        # We've visited this address already in the course of the active recursive injection.
        return
    mapper = self._address_mapper
    target_address, target_addressable = mapper.resolve(address)
    # Mark closed before recursing so cycles terminate.
    self._addresses_already_closed.add(target_address)
    try:
        dep_addresses = list(mapper.specs_to_addresses(target_addressable.dependency_specs,
                                                       relative_to=target_address.spec_path))
        deps_seen = set()
        for dep_address in dep_addresses:
            if dep_address in deps_seen:
                raise self.DuplicateAddressError(
                    "Addresses in dependencies must be unique. '{spec}' is referenced more than once."
                    .format(spec=dep_address.spec))
            deps_seen.add(dep_address)
            # Depth-first: each dependency's closure is injected before the target itself.
            self.inject_address_closure(dep_address)
        if not self.contains_address(target_address):
            target = self._target_addressable_to_target(target_address, target_addressable)
            self.apply_injectables([target])
            self.inject_target(target, dependencies=dep_addresses)
        else:
            # Target already present (e.g. injected synthetically): just add any missing edges.
            for dep_address in dep_addresses:
                if dep_address not in self.dependencies_of(target_address):
                    self.inject_dependency(target_address, dep_address)
            target = self.get_target(target_address)
        # Payload-computed dependency specs become real graph edges too.
        for traversable_spec in target.compute_dependency_specs(payload=target.payload):
            traversable_address = Address.parse(traversable_spec, relative_to=target_address.spec_path)
            self.maybe_inject_address_closure(traversable_address)
            if not any(traversable_address == t.address for t in target.dependencies):
                self.inject_dependency(dependent=target.address, dependency=traversable_address)
                target.mark_transitive_invalidation_hash_dirty()
        # Injectable specs are pulled into the graph but do NOT become dependency edges.
        for traversable_spec in target.compute_injectable_specs(payload=target.payload):
            traversable_address = Address.parse(traversable_spec, relative_to=target_address.spec_path)
            self.maybe_inject_address_closure(traversable_address)
            target.mark_transitive_invalidation_hash_dirty()
    except AddressLookupError as e:
        # Surface the failing spec's referencer to make the error actionable.
        raise self.TransitiveLookupError("{message}\n  referenced from {spec}"
                                         .format(message=e, spec=target_address.spec))
def test_invalidation_un_normalized(self):
    """Invalidation works through a symlinked (un-normalized) build root path."""
    resolved = self.address_mapper.resolve(Address.parse('a/b'))
    self.assertEqual(self.a_b_target, resolved)

    # File deletion alone is not noticed; the cached resolve survives.
    os.unlink(os.path.join(self.build_root, 'a/b/b.BUILD.json'))
    self.assertIs(resolved, self.address_mapper.resolve(Address.parse('a/b')))

    # Invalidate via a symlink to the build root: the path must be normalized
    # back to the real build file before the cache entry is dropped.
    linked_root = os.path.join(self.work_dir, 'build_root_linked')
    os.symlink(self.build_root, linked_root)
    self.address_mapper.invalidate_build_file(os.path.join(linked_root, 'a/b/b.BUILD.json'))
    with self.assertRaises(ResolveError):
        self.address_mapper.resolve(Address.parse('a/b'))
def test_resolve_cache(self):
    """Resolving an address twice returns equal results, and interior nodes are shared.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (deprecated since
    Python 3.2, removed in 3.12); use the canonical name.
    """
    scheduler = self.create_json()

    nonstrict_address = Address.parse('graph_test:nonstrict')
    nonstrict = self.resolve(scheduler, nonstrict_address)
    self.assertEqual(nonstrict, self.resolve(scheduler, nonstrict_address))

    # The already resolved `nonstrict` interior node should be re-used by `java1`.
    java1_address = Address.parse('graph_test:java1')
    java1 = self.resolve(scheduler, java1_address)
    self.assertEqual(nonstrict, java1.configurations[1])
    self.assertEqual(java1, self.resolve(scheduler, java1_address))
def test_resolve_cache(self):
    """Repeated resolves return the identical cached object; interior nodes are shared."""
    graph = self.create_json_graph()

    addr_nonstrict = Address.parse('examples/graph_test:nonstrict')
    nonstrict = graph.resolve(addr_nonstrict)
    # Identity (not just equality): the cache must hand back the same object.
    self.assertIs(nonstrict, graph.resolve(addr_nonstrict))

    # The already resolved `nonstrict` interior node should be re-used by `java1`.
    addr_java1 = Address.parse('examples/graph_test:java1')
    java1 = graph.resolve(addr_java1)
    self.assertIs(nonstrict, java1.configurations[1])
    self.assertIs(java1, graph.resolve(addr_java1))
def make_target(self, spec='', target_type=Target, dependencies=None, derived_from=None,
                synthetic=False, **kwargs):
    """Creates a target and injects it into the test's build graph.

    :API: public

    :param string spec: The target address spec that locates this target.
    :param type target_type: The concrete target subclass to create this new target from.
    :param list dependencies: A list of target instances this new target depends on.
    :param derived_from: The target this new target was derived from.
    :type derived_from: :class:`pants.build_graph.target.Target`
    :param bool synthetic: Whether to inject the target as synthetic into the build graph.
    :returns: The newly constructed, graph-injected target.
    """
    address = Address.parse(spec)
    target = target_type(name=address.target_name, address=address, build_graph=self.build_graph, **kwargs)
    dependencies = dependencies or []

    # Injectables must be applied before the target enters the graph.
    self.build_graph.apply_injectables([target])
    self.build_graph.inject_target(target,
                                   dependencies=[dep.address for dep in dependencies],
                                   derived_from=derived_from,
                                   synthetic=synthetic)

    # TODO(John Sirois): This re-creates a little bit too much work done by the BuildGraph.
    # Fixup the BuildGraph to deal with non BuildFileAddresses better and just leverage it.
    traversables = [target.compute_dependency_specs(payload=target.payload)]
    # Only poke `traversable_dependency_specs` if a concrete implementation is defined
    # in order to avoid spurious deprecation warnings.
    if type(target).traversable_dependency_specs is not Target.traversable_dependency_specs:
        traversables.append(target.traversable_dependency_specs)
    for dependency_spec in itertools.chain(*traversables):
        # Specs are resolved relative to this target's directory.
        dependency_address = Address.parse(dependency_spec, relative_to=address.spec_path)
        dependency_target = self.build_graph.get_target(dependency_address)
        if not dependency_target:
            raise ValueError('Tests must make targets for dependency specs ahead of them '
                             'being traversed, {} tried to traverse {} which does not exist.'
                             .format(target, dependency_address))
        if dependency_target not in target.dependencies:
            # A new edge invalidates the cached transitive hash.
            self.build_graph.inject_dependency(dependent=target.address, dependency=dependency_address)
            target.mark_transitive_invalidation_hash_dirty()
    return target
def test_no_address_no_family(self):
    """Failed resolves are not cached; creating the build file later succeeds."""
    with self.assertRaises(ResolveError):
        self.address_mapper.resolve(Address.parse('a/c'))
    # Errors are not cached: the second attempt fails the same way.
    with self.assertRaises(ResolveError):
        self.address_mapper.resolve(Address.parse('a/c'))

    # Once the build file exists, the same address resolves.
    build_file = os.path.join(self.build_root, 'a/c/c.BUILD.json')
    with safe_open(build_file, 'w') as fp:
        fp.write('{"type_alias": "struct", "name": "c"}')
    self.assertEqual(Struct(name='c'), self.address_mapper.resolve(Address.parse('a/c')))
def populate_target_dict(self, target_map):
    """Return a dict containing targets with files generated according to `target_map`.

    The keys of `target_map` are target address strings, while the values of `target_map`
    should be a dict which contains keyword arguments fed into `self.make_target()`, along
    with a few special keys. Special keys are:
    - 'key': used to access the target in the returned dict. Defaults to the target *name*
      of the parsed address (NOT the full address spec -- see the `key` lookup below).
    - 'filemap': creates files at the specified relative paths to the target.

    An `OrderedDict` of 2-tuples must be used with the targets topologically ordered, if
    they have dependencies on each other. Note that dependency cycles are not currently
    supported with this method.

    :param target_map: Dict mapping each target address to generate -> kwargs for
      `self.make_target()`, along with a 'key' and optionally a 'filemap' argument.
    :return: Dict mapping the required 'key' argument -> target instance for each element
      of `target_map`.
    :rtype: dict
    """
    target_dict = {}

    # Create a target from each specification and insert it into `target_dict`.
    for target_spec, target_kwargs in target_map.items():
        unprocessed_kwargs = target_kwargs.copy()

        target_base = Address.parse(target_spec).spec_path

        # Populate the target's owned files from the specification.
        filemap = unprocessed_kwargs.pop('filemap', {})
        for rel_path, content in filemap.items():
            buildroot_path = os.path.join(target_base, rel_path)
            self.create_file(buildroot_path, content)

        # Ensure any dependencies exist in the target dict (`target_map` must then be an
        # OrderedDict).
        # The 'key' is used to access the target in `target_dict`, and defaults to the
        # parsed address's target name.
        target_address = Address.parse(target_spec)
        key = unprocessed_kwargs.pop('key', target_address.target_name)
        dep_targets = []
        for dep_spec in unprocessed_kwargs.pop('dependencies', []):
            # NB: dependencies must appear earlier in `target_map` so their 'key' is
            # already registered in `target_dict`.
            existing_tgt_key = target_map[dep_spec]['key']
            dep_targets.append(target_dict[existing_tgt_key])

        # Register the generated target.
        generated_target = self.make_target(
            spec=target_spec, dependencies=dep_targets, **unprocessed_kwargs)
        target_dict[key] = generated_target

    return target_dict
def setUp(self):
    """Build the graph/scheduler/engine and resolve every example target by name."""
    build_root = os.path.join(os.path.dirname(__file__), "examples", "scheduler_inputs")
    self.graph, self.scheduler = setup_json_scheduler(build_root)
    self.engine = LocalSerialEngine(self.scheduler)

    # (attribute name, target spec) pairs resolved up front for the tests.
    for attr, spec in (
        ("guava", "3rdparty/jvm:guava"),
        ("thrift", "src/thrift/codegen/simple"),
        ("java", "src/java/codegen/simple"),
        ("java_multi", "src/java/multiple_classpath_entries"),
        ("unconfigured_thrift", "src/thrift/codegen/unconfigured"),
        ("resources", "src/resources/simple"),
        ("consumes_resources", "src/java/consumes_resources"),
        ("consumes_managed_thirdparty", "src/java/managed_thirdparty"),
        ("managed_guava", "3rdparty/jvm/managed:guava"),
        ("managed_hadoop", "3rdparty/jvm/managed:hadoop-common"),
    ):
        setattr(self, attr, self.graph.resolve(Address.parse(spec)))
def create_sources_field(self, sources, sources_rel_path, address=None, key_arg=None):
    """Factory method to create a SourcesField appropriate for the type of the sources object.

    Note that this method is called before the call to Target.__init__ so don't expect
    fields to be populated!

    :param sources: An Addresses reference, a FilesetWithSpec, a plain list of source
      paths, or None.
    :param sources_rel_path: Path the source entries are relative to.
    :param address: Address of the owning target; only used for error messages.
    :param key_arg: Name of the originating keyword argument; only used for error messages.
    :return: a payload field object representing the sources parameter
    :rtype: SourcesField
    """
    if isinstance(sources, Addresses):
        # Currently, this is only created by the result of from_target() which takes a single argument
        if len(sources.addresses) != 1:
            raise self.WrongNumberOfAddresses(
                "Expected a single address to from_target() as argument to {spec}"
                .format(spec=address.spec))
        referenced_address = Address.parse(sources.addresses[0], relative_to=sources.rel_path)
        # Deferred: the actual sources come from the referenced target later.
        return DeferredSourcesField(ref_address=referenced_address)
    elif isinstance(sources, FilesetWithSpec):
        # A fileset already carries its own filespec.
        filespec = sources.filespec
    else:
        # Plain list (or None): validate and build a globs filespec by hand.
        sources = sources or []
        assert_list(sources, key_arg=key_arg)
        filespec = {'globs': [os.path.join(sources_rel_path, src) for src in (sources or [])]}
    return SourcesField(sources=sources, sources_rel_path=sources_rel_path, filespec=filespec)
def _synthetic_resources_target(self):
    """Inject (or re-fetch) a synthetic Resources target wrapping this target's resources.

    :returns: The injected Resources target, or None when there are no resource paths.
    """
    if not self.payload.resources.source_paths:
        return None

    # Derive a spec from this target's address; for safety, keep appending '_' until we
    # find an address not already in the graph (collisions are highly unlikely).
    candidate_spec = self.address.spec + '_synthetic_resources'
    resource_address = Address.parse(spec=candidate_spec)
    while self._build_graph.contains_address(resource_address):
        candidate_spec += '_'
        resource_address = Address.parse(spec=candidate_spec)

    self._build_graph.inject_synthetic_target(resource_address,
                                              Resources,
                                              sources=self.payload.resources.source_paths,
                                              derived_from=self)
    return self._build_graph.get_target(resource_address)
def test_max_recursion(self):
    """A dependency cycle trips RecursiveDepthError when hashing transitively."""
    tgt_a = self.make_target('a', Target)
    tgt_b = self.make_target('b', Target, dependencies=[tgt_a])
    self.make_target('c', Target, dependencies=[tgt_b])

    # Close the cycle: a -> c -> b -> a.
    tgt_a.inject_dependency(Address.parse('c'))
    with self.assertRaises(Target.RecursiveDepthError):
        tgt_a.transitive_invalidation_hash()
def traversable_specs(self):
    """Yield the inherited traversable specs plus any binaries published via `provides`."""
    for spec in super(PythonTarget, self).traversable_specs:
        yield spec
    if self._provides:
        # Binary specs are relative to this target's directory; yield normalized specs.
        for binary_spec in self._provides._binaries.values():
            yield Address.parse(binary_spec, relative_to=self.address.spec_path).spec
def execute(self):
    """Build a wheel for each invalid local python_dist target and wire in its requirements.

    For every dist target (valid or not) a synthetic `<spec>__req_lib` requirements
    target is injected, and every dependent of the dist is made to depend on it so the
    built wheel is consumable downstream.

    :raises TargetDefinitionException: if a python_dist target declares `dependencies`.
    """
    dist_targets = self.context.targets(is_local_python_dist)
    build_graph = self.context.build_graph

    if dist_targets:
        with self.invalidated(dist_targets,
                              fingerprint_strategy=DefaultFingerprintStrategy(),
                              invalidate_dependents=True) as invalidation_check:
            # Only rebuild dists whose fingerprints changed.
            for vt in invalidation_check.invalid_vts:
                if vt.target.dependencies:
                    raise TargetDefinitionException(
                        vt.target, 'The `dependencies` field is disallowed on `python_dist` targets. '
                                   'List any 3rd party requirements in the install_requirements argument '
                                   'of your setup function.'
                    )
                self._create_dist(vt.target, vt.results_dir)

            # All dists (cached or freshly built) get a synthetic requirements target.
            for vt in invalidation_check.all_vts:
                dist = self._get_whl_from_dir(os.path.join(vt.results_dir, 'dist'))
                req_lib_addr = Address.parse('{}__req_lib'.format(vt.target.address.spec))
                self._inject_synthetic_dist_requirements(dist, req_lib_addr)
                # Make any target that depends on the dist depend on the synthetic req_lib,
                # for downstream consumption.
                for dependent in build_graph.dependents_of(vt.target.address):
                    build_graph.inject_dependency(dependent, req_lib_addr)
def to_jar_dependencies(relative_to, jar_library_specs, build_graph):
    """Convenience method to resolve a list of specs to JarLibraries and return its jars attributes.

    Expects that the jar_libraries are declared relative to this target.

    :API: public

    :param Address relative_to: address target that references jar_library_specs, for error messages
    :param list jar_library_specs: string specs to JavaLibrary targets. Note, this list should be returned
      by the caller's traversable_specs() implementation to make sure that the jar_dependency jars have
      been added to the build graph.
    :param BuildGraph build_graph: build graph instance used to search for specs
    :return: list of JarDependency instances represented by the library_specs
    """
    jar_deps = OrderedSet()
    for library_spec in jar_library_specs:
        # Specs must be strings; anything else is a BUILD authoring error.
        if not isinstance(library_spec, string_types):
            raise JarLibrary.ExpectedAddressError(
                "{address}: expected imports to contain string addresses, got {found_class}."
                .format(address=relative_to.spec, found_class=type(library_spec).__name__))

        library_address = Address.parse(library_spec, relative_to=relative_to.spec_path)
        library_target = build_graph.get_target(library_address)
        if not isinstance(library_target, JarLibrary):
            raise JarLibrary.WrongTargetTypeError(
                "{address}: expected {spec} to be jar_library target type, got {found_class}"
                .format(address=relative_to.spec, spec=library_spec,
                        found_class=type(library_target).__name__))

        # OrderedSet de-dupes while preserving first-seen order.
        jar_deps.update(library_target.jar_dependencies)
    return list(jar_deps)
def java_sources(self):
    """Yield the resolved targets named by this target's java_sources specs.

    :raises TargetDefinitionException: if any spec does not resolve to a known target.
    """
    for source_spec in self._java_sources_specs:
        source_address = Address.parse(source_spec, relative_to=self.address.spec_path)
        resolved = self._build_graph.get_target(source_address)
        if resolved is None:
            raise TargetDefinitionException(self, 'No such java target: {}'.format(source_spec))
        yield resolved
def test_codegen_simple(self):
    """End-to-end plan construction for a java target that depends on thrift codegen.

    Expects exactly four plans: thrift gen and ivy resolve as independent leaves,
    then javac of the generated thrift sources, then javac of the java target.
    """
    build_request = BuildRequest(goals=["compile"], addressable_roots=[self.java.address])
    execution_graph = self.scheduler.execution_graph(build_request)
    plans = list(execution_graph.walk())
    self.assertEqual(4, len(plans))

    thrift_jars = [
        Jar(org="org.apache.thrift", name="libthrift", rev="0.9.2"),
        Jar(org="commons-lang", name="commons-lang", rev="2.5"),
        self.graph.resolve(Address.parse("src/thrift:slf4j-api")),
    ]
    jars = [self.guava] + thrift_jars

    # Independent leaves 1st: their relative order is unspecified, so compare as a set.
    self.assertEqual(
        {
            (
                Sources.of(".java"),
                Plan(
                    func_or_task_type=gen_apache_thrift,
                    subjects=[self.thrift],
                    strict=True,
                    rev="0.9.2",
                    gen="java",
                    sources=["src/thrift/codegen/simple/simple.thrift"],
                ),
            ),
            (Classpath, Plan(func_or_task_type=IvyResolve, subjects=jars, jars=jars)),
        },
        set(self.extract_product_type_and_plan(p) for p in plans[0:2]),
    )

    # The rest is linked: thrift-generated sources compile against the thrift jars...
    self.assertEqual(
        (
            Classpath,
            Plan(
                func_or_task_type=Javac,
                subjects=[self.thrift],
                sources=Promise(Sources.of(".java"), self.thrift),
                classpath=[Promise(Classpath, jar) for jar in thrift_jars],
            ),
        ),
        self.extract_product_type_and_plan(plans[2]),
    )
    # ...and the java target compiles against guava plus the thrift target's classpath.
    self.assertEqual(
        (
            Classpath,
            Plan(
                func_or_task_type=Javac,
                subjects=[self.java],
                sources=["src/java/codegen/simple/Simple.java"],
                classpath=[Promise(Classpath, self.guava), Promise(Classpath, self.thrift)],
            ),
        ),
        self.extract_product_type_and_plan(plans[3]),
    )
def test_sources_ordering(self):
    """Sources are returned in BUILD-file declaration order.

    Fix: `assertEquals` is a deprecated alias of `assertEqual` (deprecated since
    Python 3.2, removed in 3.12); use the canonical name.
    """
    spec = 'testprojects/src/resources/org/pantsbuild/testproject/ordering'
    with self.open_scheduler([spec]) as (graph, _, _):
        target = graph.get_target(Address.parse(spec))
        sources = [os.path.basename(s) for s in target.sources_relative_to_buildroot()]
        self.assertEqual(['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'], sources)
def _create_intermediate_target(self, address, suffix):
    """Create (if needed) a synthetic intermediary target depending on `address`.

    :param string address: A target address.
    :param string suffix: A string used as a suffix of the intermediate target name.
    :returns: The address of a synthetic intermediary target.
    :raises ExpectedAddressError: if `address` is not a string.
    """
    if not isinstance(address, six.string_types):
        raise self.ExpectedAddressError("Expected string address argument, got type {type}"
                                        .format(type=type(address)))

    address = Address.parse(address, self._parse_context.rel_path)
    # NB(gmalmquist): Ideally there should be a way to indicate that these targets are synthetic
    # and shouldn't show up in `./pants list` etc, because we really don't want people to write
    # handwritten dependencies on them. For now just give them names containing "-unstable-" as a
    # hint.
    hash_str = hash_target(str(address), suffix)
    # Spaces in the suffix are not address-safe; replace them with dots.
    name = '{name}-unstable-{suffix}-{index}'.format(
        name=address.target_name,
        suffix=suffix.replace(' ', '.'),
        index=hash_str,
    )

    # Idempotent: re-creating the same intermediary is a no-op.
    self._parse_context.create_object_if_not_exists(
        'target',
        name=name,
        dependencies=[address.spec],
        **self.extra_target_arguments
    )
    # Relative spec (':name') within the current BUILD file's namespace.
    return ':{}'.format(name)
def injectables(self, build_graph):
    """Ensure the JUnit library target exists in the build graph, injecting it if absent."""
    junit_addr = Address.parse(self.injectables_spec_for_key('library'))
    if build_graph.contains_address(junit_addr):
        return
    build_graph.inject_synthetic_target(junit_addr,
                                        JarLibrary,
                                        jars=[JUnit.LIBRARY_JAR],
                                        scope='forced')
def test_create_bad_targets(self):
    """BuildFileAliases rejects values that are not Target subclasses."""
    with self.assertRaises(TypeError):
        BuildFileAliases(targets={'fred': object()})

    # A Target *instance* is just as invalid as an arbitrary object: aliases
    # must map to types, not instances.
    instance = Target('fred', Address.parse('a:b'), MutableBuildGraph(address_mapper=None))
    with self.assertRaises(TypeError):
        BuildFileAliases(targets={'fred': instance})
def __init__(self, name, build_file, build_file_source_lines, target_source_lines,
             target_interval, dependencies, dependencies_interval):
    """See BuildFileManipulator.load() for how to construct one as a user."""
    self.name = name
    self.build_file = build_file
    self.target_address = BuildFileAddress(build_file, name)
    self._build_file_source_lines = build_file_source_lines
    self._target_source_lines = target_source_lines
    self._target_interval = target_interval
    self._dependencies_interval = dependencies_interval

    # Index dependencies by their parsed address, rejecting duplicates eagerly.
    self._dependencies_by_address = {}
    for dep in dependencies:
        dep_address = Address.parse(dep.spec, relative_to=build_file.spec_path)
        if dep_address in self._dependencies_by_address:
            raise BuildTargetParseError('The address {dep_address} occurred multiple times in the '
                                        'dependency specs for target {name} in {build_file}. '
                                        .format(dep_address=dep_address.spec,
                                                name=name,
                                                build_file=build_file))
        self._dependencies_by_address[dep_address] = dep
def addr(spec):
    """Shorthand: parse a spec string into an Address."""
    parsed = Address.parse(spec)
    return parsed
def parse_addr(a):
    """Parse `a` relative to the enclosing target's spec path (closed-over `address`)."""
    rel = address.spec_path
    return Address.parse(a, relative_to=rel)
def setUp(self):
    """Point the scheduler at the canned examples and note the java target address."""
    examples_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
    self.scheduler = setup_json_scheduler(examples_root)
    self.java = Address.parse('src/java/codegen/simple')
def setup_json_scheduler(build_root, inline_nodes=True):
    """Return a build graph and scheduler configured for BLD.json files under the given build root.

    :param build_root: Directory scanned for BLD.json build files.
    :param bool inline_nodes: Passed through to the LocalScheduler.
    :rtype :class:`pants.engine.scheduler.LocalScheduler`
    """
    symbol_table_cls = ExampleTable

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   build_pattern='BLD.json',
                                   parser_cls=JsonParser)
    source_roots = SourceRoots(('src/java', 'src/scala'))
    scrooge_tool_address = Address.parse('src/scala/scrooge')

    # Goal name -> product type it produces.
    goals = {
        'compile': Classpath,
        # TODO: to allow for running resolve alone, should split out a distinct 'IvyReport' product.
        'resolve': Classpath,
        'list': Address,
        GenGoal.name(): GenGoal,
        'unpickleable': UnpickleableResult,
        'ls': Files,
        'cat': FilesContent,
    }
    # Each entry: (product type, selectors for its inputs, task function).
    tasks = [
        # Codegen
        GenGoal.signature(),
        (JavaSources,
         [Select(ThriftSources),
          SelectVariant(ApacheThriftJavaConfiguration, 'thrift')],
         gen_apache_thrift),
        (PythonSources,
         [Select(ThriftSources),
          SelectVariant(ApacheThriftPythonConfiguration, 'thrift')],
         gen_apache_thrift),
        (ScalaSources,
         [Select(ThriftSources),
          SelectVariant(ScroogeScalaConfiguration, 'thrift'),
          SelectLiteral(scrooge_tool_address, Classpath)],
         gen_scrooge_thrift),
        (JavaSources,
         [Select(ThriftSources),
          SelectVariant(ScroogeJavaConfiguration, 'thrift'),
          SelectLiteral(scrooge_tool_address, Classpath)],
         gen_scrooge_thrift),
    ] + [
        # scala dependency inference
        (ScalaSources,
         [Select(ScalaInferredDepsSources),
          SelectDependencies(Address, ImportedJVMPackages)],
         reify_scala_sources),
        (ImportedJVMPackages,
         [SelectProjection(FilesContent, PathGlobs, ('path_globs', ), ScalaInferredDepsSources)],
         extract_scala_imports),
        (Address,
         [Select(JVMPackageName),
          SelectDependencies(AddressFamily, Dirs, field='stats')],
         select_package_address),
        (PathGlobs,
         [Select(JVMPackageName),
          SelectLiteral(source_roots, SourceRoots)],
         calculate_package_search_path),
    ] + [
        # Remote dependency resolution
        (Classpath,
         [Select(Jar)],
         ivy_resolve),
        (Jar,
         [Select(ManagedJar),
          SelectVariant(ManagedResolve, 'resolve')],
         select_rev),
    ] + [
        # Compilers
        (Classpath,
         [Select(ResourceSources)],
         isolate_resources),
        (Classpath,
         [Select(BuildPropertiesConfiguration)],
         write_name_file),
        (Classpath,
         [Select(JavaSources),
          SelectDependencies(Classpath, JavaSources)],
         javac),
        (Classpath,
         [Select(ScalaSources),
          SelectDependencies(Classpath, ScalaSources)],
         scalac),
    ] + [
        # TODO
        (UnpickleableOutput,
         [],
         unpickleable_output),
        (UnpickleableResult,
         [Select(UnpickleableOutput)],
         unpickleable_input),
    ] + (
        # Address-graph and filesystem intrinsic tasks.
        create_graph_tasks(address_mapper, symbol_table_cls)
    ) + (
        create_fs_tasks()
    )

    project_tree = FileSystemProjectTree(build_root)
    return LocalScheduler(goals,
                          tasks,
                          project_tree,
                          graph_lock=None,
                          inline_nodes=inline_nodes,
                          graph_validator=GraphValidator(symbol_table_cls))
def test_contains_address(self):
    """contains_address flips from False to True once the target is injected."""
    address = Address.parse('a')
    self.assertFalse(self.build_graph.contains_address(address))
    self.build_graph.inject_target(
        Target(name='a', address=address, build_graph=self.build_graph))
    self.assertTrue(self.build_graph.contains_address(address))
def checker_target(self):
    """Resolve the checker into the build graph and return its target."""
    checker_spec = self._CHECKER_ADDRESS_SPEC
    # Resolving through the context injects the target into the graph first.
    self.context.resolve(checker_spec)
    return self.context.build_graph.get_target(Address.parse(checker_spec))
def make_target(self,
                spec='',
                target_type=Target,
                dependencies=None,
                derived_from=None,
                synthetic=False,
                make_missing_sources=True,
                **kwargs):
  """Creates a target and injects it into the test's build graph.

  :API: public

  :param string spec: The target address spec that locates this target.
  :param type target_type: The concrete target subclass to create this new target from.
  :param list dependencies: A list of target instances this new target depends on.
  :param derived_from: The target this new target was derived from.
  :type derived_from: :class:`pants.build_graph.target.Target`
  :param bool synthetic: Whether to inject the target as synthetic (not backed by a BUILD file).
  :param bool make_missing_sources: If True, create empty files on disk for any non-glob
                                    entries in `kwargs['sources']` before injection.
  """
  self._init_target_subsystem()

  address = Address.parse(spec)

  if make_missing_sources and 'sources' in kwargs:
    # Materialize plain (non-glob) source entries as empty files so source resolution succeeds.
    for source in kwargs['sources']:
      if '*' not in source:
        self.create_file(os.path.join(address.spec_path, source), mode='a', contents='')
    kwargs['sources'] = self.sources_for(kwargs['sources'], address.spec_path)

  target = target_type(name=address.target_name,
                       address=address,
                       build_graph=self.build_graph,
                       **kwargs)
  dependencies = dependencies or []

  self.build_graph.apply_injectables([target])
  self.build_graph.inject_target(target,
                                 dependencies=[dep.address for dep in dependencies],
                                 derived_from=derived_from,
                                 synthetic=synthetic)

  # TODO(John Sirois): This re-creates a little bit too much work done by the BuildGraph.
  # Fixup the BuildGraph to deal with non BuildFileAddresses better and just leverage it.
  traversables = [target.compute_dependency_specs(payload=target.payload)]

  for dependency_spec in itertools.chain(*traversables):
    dependency_address = Address.parse(dependency_spec, relative_to=address.spec_path)
    dependency_target = self.build_graph.get_target(dependency_address)
    if not dependency_target:
      # Computed dependency specs must already be present in the graph; fail loudly otherwise.
      raise ValueError('Tests must make targets for dependency specs ahead of them '
                       'being traversed, {} tried to traverse {} which does not exist.'
                       .format(target, dependency_address))
    if dependency_target not in target.dependencies:
      self.build_graph.inject_dependency(dependent=target.address,
                                         dependency=dependency_address)
      target.mark_transitive_invalidation_hash_dirty()

  return target
def injectables(self, build_graph):
  """Ensure a ToolsJar target exists at the tools-jar address.

  Injects a synthetic ToolsJar when the address is unoccupied; raises if a
  non-synthetic (manually defined) target already sits at that address.
  """
  tools_jar_address = Address.parse(self._tools_jar_spec)
  if build_graph.contains_address(tools_jar_address):
    # Something already lives at this address; it must be one we synthesized.
    if not build_graph.get_target(tools_jar_address).is_synthetic:
      raise build_graph.ManualSyntheticTargetError(tools_jar_address)
  else:
    build_graph.inject_synthetic_target(tools_jar_address, ToolsJar)
def test_type_mismatch_error(self) -> None:
    """A type-mismatched entry fails to resolve with ResolvedTypeMismatchError and traces it."""
    sched = self.create_json()
    bad_address = Address.parse("graph_test:type_mismatch")
    self.assert_resolve_failure_type(ResolvedTypeMismatchError, bad_address, sched)
    self.do_test_trace_message(sched, bad_address)
def resolve(self, spec):
  """Return an iterator over the target(s) the given address spec points to."""
  parsed = Address.parse(spec)
  # Injecting the closure is idempotent and short-circuits if it is already present.
  self.inject_address_closure(parsed)
  return self.transitive_subgraph_of_addresses([parsed])
def library_specs(self):
  """Lists of specs to resolve to jar_libraries containing more jars."""
  base_path = self.address.spec_path
  resolved = []
  for raw_spec in self.payload.library_specs:
    resolved.append(Address.parse(raw_spec, relative_to=base_path).spec)
  return resolved
def test_timeout_validation() -> None:
    """Non-positive timeouts are rejected; a positive timeout keeps its value."""
    for bad_value in (-100, 0):
        with pytest.raises(TargetDefinitionException):
            Timeout(bad_value, address=Address.parse(":tests"))
    assert Timeout(5, address=Address.parse(":tests")).value == 5
def test_python_tests_sources_default_globs(self) -> None:
    """With no explicit sources, PythonTestsSources matches only the test files by default."""
    all_files = [*self.PYTHON_SRC_FILES, *self.PYTHON_TEST_FILES]
    self.create_files(path="", files=all_files)
    sources = PythonTestsSources(None, address=Address.parse(":tests"))
    hydrated = self.request_single_product(SourcesResult, sources.request)
    assert set(hydrated.snapshot.files) == set(self.PYTHON_TEST_FILES)
def injectables(self, build_graph):
  """Ensure a target exists at the javac address.

  If a javac target has been defined on disk, use it; otherwise, inject a
  synthetic ToolsJar target to provide the dependency.
  """
  javac_address = Address.parse(self._javac_spec)
  if build_graph.contains_address(javac_address):
    return
  build_graph.inject_synthetic_target(javac_address, ToolsJar)
def inject_address_closure(self, address):
  """Resolve `address`, inject its target, and recursively inject its full dependency closure.

  Duplicate entries in a target's dependency list raise DuplicateAddressError; any
  lookup failure while walking the closure is re-raised as TransitiveLookupError with
  the referencing spec appended for context.
  """
  if self.contains_address(address):
    # The address was either mapped in or synthetically injected already.
    return

  if address in self._addresses_already_closed:
    # We've visited this address already in the course of the active recursive injection.
    return

  mapper = self._address_mapper

  target_address, target_addressable = mapper.resolve(address)

  # Mark visited before recursing so dependency cycles do not recurse forever.
  self._addresses_already_closed.add(target_address)
  try:
    dep_addresses = list(mapper.specs_to_addresses(target_addressable.dependency_specs,
                                                   relative_to=target_address.spec_path))
    deps_seen = set()
    for dep_address in dep_addresses:
      if dep_address in deps_seen:
        raise self.DuplicateAddressError(
          "Addresses in dependencies must be unique. '{spec}' is referenced more than once."
          .format(spec=dep_address.spec))
      deps_seen.add(dep_address)
      self.inject_address_closure(dep_address)

    if not self.contains_address(target_address):
      target = self._target_addressable_to_target(target_address, target_addressable)
      self.apply_injectables([target])
      self.inject_target(target, dependencies=dep_addresses)
    else:
      # The target already exists (e.g. injected synthetically); just wire up any
      # dependency edges it does not have yet.
      for dep_address in dep_addresses:
        if dep_address not in self.dependencies_of(target_address):
          self.inject_dependency(target_address, dep_address)
      target = self.get_target(target_address)

    traversables = [target.compute_dependency_specs(payload=target.payload)]
    # Only poke `traversable_dependency_specs` if a concrete implementation is defined
    # in order to avoid spurious deprecation warnings.
    if type(target).traversable_dependency_specs is not Target.traversable_dependency_specs:
      traversables.append(target.traversable_dependency_specs)
    for traversable_spec in itertools.chain(*traversables):
      traversable_address = Address.parse(traversable_spec,
                                          relative_to=target_address.spec_path)
      self.maybe_inject_address_closure(traversable_address)

      if not any(traversable_address == t.address for t in target.dependencies):
        self.inject_dependency(dependent=target.address,
                               dependency=traversable_address)
        target.mark_transitive_invalidation_hash_dirty()

    traversables = [target.compute_injectable_specs(payload=target.payload)]
    # Only poke `traversable_specs` if a concrete implementation is defined
    # in order to avoid spurious deprecation warnings.
    if type(target).traversable_specs is not Target.traversable_specs:
      traversables.append(target.traversable_specs)
    for traversable_spec in itertools.chain(*traversables):
      traversable_address = Address.parse(traversable_spec,
                                          relative_to=target_address.spec_path)
      self.maybe_inject_address_closure(traversable_address)
      # Injectable specs do not become dependency edges, but they may still affect
      # the transitive invalidation hash.
      target.mark_transitive_invalidation_hash_dirty()

  except AddressLookupError as e:
    raise self.TransitiveLookupError("{message}\n referenced from {spec}"
                                     .format(message=e, spec=target_address.spec))
def test_address_no_name(self):
  """A Struct constructed without a name derives its name from its address."""
  struct = Struct(address=Address.parse('a:b'))
  self.assertEqual('b', struct.name)
def test_address_name_conflict(self):
  """Supplying both an explicit name and an address is rejected."""
  address = Address.parse('a:b')
  with self.assertRaises(ValidationError):
    Struct(name='a', address=address)
def test_no_targets(self):
  """Injecting addresses from a BUILD file that defines no targets fails the lookup."""
  self.add_to_build_file('empty/BUILD', 'pass')
  for spec in ('empty', 'empty:foo'):
    with self.assertRaises(AddressLookupError):
      self.build_graph.inject_address_closure(Address.parse(spec))
def binary_iter():
  """Yield (key, target) pairs for each binary provided by this target's payload."""
  provides = self.payload.provides
  if not provides:
    return
  for key, binary_spec in provides.binaries.items():
    binary_address = Address.parse(binary_spec, relative_to=self.address.spec_path)
    yield (key, self._build_graph.get_target(binary_address))
def get_target_from_spec(self, spec, relative_to=''):
  """Converts `spec` into an address and returns the result of `get_target`

  :API: public
  """
  parsed_address = Address.parse(spec, relative_to=relative_to)
  return self.get_target(parsed_address)
def inject_address_closure(self, spec):
  """Parse `spec` and inject the closure of the resulting address into the build graph."""
  parsed_address = Address.parse(spec)
  self.build_graph.inject_address_closure(parsed_address)
def test_equivalence(self) -> None:
    """Addresses compare equal by value, whether constructed directly or parsed."""
    constructed = Address("a/b", "c")
    self.assertNotEqual("Not really an address", constructed)
    self.assertEqual(constructed, Address("a/b", "c"))
    self.assertEqual(constructed, Address.parse("a/b:c"))
    self.assertEqual(Address.parse("a/b:c"), Address.parse("a/b:c"))
def setUp(self):
  """Wire up a scheduler over the example inputs and pre-parse commonly used addresses."""
  build_root = os.path.join(os.path.dirname(__file__), 'examples', 'scheduler_inputs')
  self.spec_parser = CmdLineSpecParser(build_root)
  self.scheduler, storage = setup_json_scheduler(build_root)
  self.storage = storage
  self.engine = LocalSerialEngine(self.scheduler, storage)

  # Table of attribute name -> address spec for the example targets the tests exercise.
  specs_by_attr = {
    'guava': '3rdparty/jvm:guava',
    'thrift': 'src/thrift/codegen/simple',
    'java': 'src/java/codegen/simple',
    'java_simple': 'src/java/simple',
    'java_multi': 'src/java/multiple_classpath_entries',
    'no_variant_thrift': 'src/java/codegen/selector:conflict',
    'unconfigured_thrift': 'src/thrift/codegen/unconfigured',
    'resources': 'src/resources/simple',
    'consumes_resources': 'src/java/consumes_resources',
    'consumes_managed_thirdparty': 'src/java/managed_thirdparty',
    'managed_guava': '3rdparty/jvm/managed:guava',
    'managed_hadoop': '3rdparty/jvm/managed:hadoop-common',
    'managed_resolve_latest': '3rdparty/jvm/managed:latest-hadoop',
    'inferred_deps': 'src/scala/inferred_deps',
  }
  for attr_name, spec in specs_by_attr.items():
    setattr(self, attr_name, Address.parse(spec))