def test_sibling_references(self):
    """A BUILD.sibling file can reference targets defined in its sibling BUILD file."""
    with temporary_dir() as root_dir:
        buildfile = create_buildfile(root_dir, 'a', name='BUILD', content=dedent("""
          dependencies(name='util',
            dependencies=[
              jar(org='com.twitter', name='util', rev='0.0.1')
            ]
          )
        """).strip())
        sibling = create_buildfile(root_dir, 'a', name='BUILD.sibling', content=dedent("""
          dependencies(name='util-ex',
            dependencies=[
              pants(':util'),
              jar(org='com.twitter', name='util-ex', rev='0.0.1')
            ]
          )
        """).strip())
        ParseContext(buildfile).parse()

        utilex = Target.get(Address.parse(root_dir, 'a:util-ex', is_relative=False))
        utilex_deps = set(utilex.resolve())
        util = Target.get(Address.parse(root_dir, 'a:util', is_relative=False))
        util_deps = set(util.resolve())

        # util's resolved deps must all appear among util-ex's resolved deps.
        self.assertEquals(util_deps, util_deps.intersection(utilex_deps))
def parse_jarcoordinate(coordinate):
    """Resolve a coordinate of the form 'org#name' or a BUILD address to (org, name).

    :raises TaskError: if the address cannot be found, parsed, or is not exported.
    """
    parts = coordinate.split('#', 1)
    if len(parts) == 2:
        org, name = parts
        return org, name

    # Not an org#name pair: treat the coordinate as a BUILD target address.
    try:
        address = Address.parse(get_buildroot(), coordinate)
        try:
            target = Target.get(address)
            if not target:
                # Suggest nearby targets from the same BUILD file.
                siblings = Target.get_all_addresses(address.buildfile)
                prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
                raise TaskError('%s => %s?:\n    %s' % (address, prompt,
                                                        '\n    '.join(str(a) for a in siblings)))
            if not target.is_exported:
                raise TaskError('%s is not an exported target' % coordinate)
            return target.provides.org, target.provides.name
        except (ImportError, SyntaxError, TypeError):
            raise TaskError('Failed to parse %s' % address.buildfile.relpath)
    except IOError:
        raise TaskError('No BUILD file could be found at %s' % coordinate)
def test_sibling_references(self):
    """Targets may be referenced across BUILD and BUILD.sibling files in one directory."""
    with temporary_dir() as root_dir:
        main_buildfile = create_buildfile(root_dir, 'a', name='BUILD', content=dedent("""
          dependencies(name='util',
            dependencies=[
              jar(org='com.twitter', name='util', rev='0.0.1')
            ]
          )
        """).strip())
        create_buildfile(root_dir, 'a', name='BUILD.sibling', content=dedent("""
          dependencies(name='util-ex',
            dependencies=[
              pants(':util'),
              jar(org='com.twitter', name='util-ex', rev='0.0.1')
            ]
          )
        """).strip())
        ParseContext(main_buildfile).parse()

        dependent = Target.get(Address.parse(root_dir, 'a:util-ex', is_relative=False))
        dependent_deps = set(dependent.resolve())
        base = Target.get(Address.parse(root_dir, 'a:util', is_relative=False))
        base_deps = set(base.resolve())

        # Every dep of a:util must also be a dep of a:util-ex.
        self.assertEquals(base_deps, base_deps.intersection(dependent_deps))
def _coerce_to_targets(cls, from_str, to_str):
    """Coerce a (from, to) pair of target specs or Target objects into Target objects.

    Both arguments must be of the same kind: either both address strings or both
    Target instances.

    :raises TaskError: if only one argument is a string, or an address does not exist.
    """
    if isinstance(from_str, Compatibility.string):
        if not isinstance(to_str, Compatibility.string):
            raise TaskError('Finding paths from string %s to non-string %s'
                            % (from_str, str(to_str)))

        from_address = Address.parse(get_buildroot(), from_str)
        to_address = Address.parse(get_buildroot(), to_str)

        from_target = Target.get(from_address)
        to_target = Target.get(to_address)

        if not from_target:
            raise TaskError('Target %s doesn\'t exist' % from_address.reference())
        if not to_target:
            raise TaskError('Target %s doesn\'t exist' % to_address.reference())

        return from_target, to_target

    elif isinstance(to_str, Compatibility.string):
        # Bug fix: the message previously labeled the arguments backwards — here
        # from_str is the non-string and to_str is the string.
        raise TaskError('Finding paths from non-string %s to string %s'
                        % (str(from_str), to_str))
    return from_str, to_str
def __init__(self, name, sources=None, exclusives=None):
    """Create a source-bearing target rooted at the SourceRoot that contains it.

    :param name: The target name within its BUILD file.
    :param sources: Optional list of source specs; resolution is deferred.
    :param exclusives: Optional exclusives mapping passed through to Target.
    """
    Target.__init__(self, name, exclusives=exclusives)
    self.add_labels('sources')
    # Locate the source root eagerly; the sources themselves resolve lazily.
    self.target_base = SourceRoot.find(self)
    self._unresolved_sources = sources or []
    self._resolved_sources = None
def _owning_targets(self, file):
    """Yield every target that owns `file`; a BUILD file is owned by all its targets."""
    file_abspath = os.path.join(get_buildroot(), file)
    for build_file in self._candidate_owners(file):
        # When the queried file IS this BUILD file, every target defined in it owns it.
        owns_all = build_file.full_path == file_abspath
        for address in Target.get_all_addresses(build_file):
            candidate = Target.get(address)
            if not candidate:
                continue
            if owns_all or (has_sources(candidate) and self._owns(candidate, file)):
                yield candidate
def _owning_targets(self, path):
    """Yield targets owning `path`; the BUILD file itself belongs to every target in it."""
    path_abspath = os.path.join(get_buildroot(), path)
    for build_file in self._candidate_owners(path):
        is_build_file = build_file.full_path == path_abspath
        for address in Target.get_all_addresses(build_file):
            candidate = Target.get(address)
            if candidate and (is_build_file or
                              (candidate.has_sources() and self._owns(candidate, path))):
                yield candidate
def _find_targets(self):
    """Yield the specified target roots, or every target in the build root if none.

    Fix: use idiomatic truthiness instead of `len(...) > 0`.
    """
    if self.context.target_roots:
        for target in self.context.target_roots:
            yield target
    else:
        # No roots specified: scan every BUILD file under the build root.
        for buildfile in BuildFile.scan_buildfiles(get_buildroot()):
            for target_address in Target.get_all_addresses(buildfile):
                yield Target.get(target_address)
def console_output(self, _):
    """Yield addresses of targets that depend on the target roots (their dependees)."""
    buildfiles = OrderedSet()
    if self._dependees_type:
        # Restrict the scan to source roots of the requested target types.
        base_paths = OrderedSet()
        for dependees_type in self._dependees_type:
            try:
                # Try to do a fully qualified import 1st for filtering on custom types.
                from_list, module, type_name = dependees_type.rsplit('.', 2)
                __import__('%s.%s' % (from_list, module), fromlist=[from_list])
            except (ImportError, ValueError):
                # Fall back on pants provided target types.
                if hasattr(twitter.pants.base.build_file_context, dependees_type):
                    type_name = getattr(twitter.pants.base.build_file_context, dependees_type)
                else:
                    raise TaskError('Invalid type name: %s' % dependees_type)
            # Find the SourceRoot for the given input type
            base_paths.update(SourceRoot.roots(type_name))
        if not base_paths:
            raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                            '\nPlease define a source root in BUILD file as:' +
                            '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
        for base_path in base_paths:
            buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
    else:
        # No type filter: scan every BUILD file in the build root.
        buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    # Invert the dependency graph: map each target to the set of targets depending on it.
    dependees_by_target = defaultdict(set)
    for buildfile in buildfiles:
        for address in Target.get_all_addresses(buildfile):
            for target in Target.get(address).resolve():
                # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
                # user vs. targets created by pants at runtime.
                target = self.get_concrete_target(target)
                if hasattr(target, 'dependencies'):
                    for dependencies in target.dependencies:
                        for dependency in dependencies.resolve():
                            dependency = self.get_concrete_target(dependency)
                            dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        # Closed mode includes the roots themselves in the output.
        for root in roots:
            yield str(root.address)

    for dependant in self.get_dependants(dependees_by_target, roots):
        yield str(dependant.address)
def __init__(self, name, url_builder, exclusives=None):
    """A wiki-page target.

    :param string name: The name of this target, which combined with this build file
      defines the target :class:`twitter.pants.base.address.Address`.
    :param url_builder: Function that accepts a page target and an optional wiki config
      dict, returning a tuple of (alias, fully qualified url).
    :param exclusives: Optional exclusives passed through to Target.
    """
    Target.__init__(self, name, exclusives=exclusives)
    self.url_builder = url_builder
def _walk(self, walked, work, predicate=None):
    """Walk this target's dependency graph depth-first, applying `work` to new targets.

    Fix: `not dep in walked` replaced with the idiomatic `dep not in walked`.

    :param walked: Set of already-visited targets; mutated in place.
    :param work: Callable applied to each unvisited dependency; may return extra
      targets whose graphs are then walked as well.
    :param predicate: Optional filter; `work` and recursion happen only when
      predicate(dep) is truthy (or no predicate is given).
    """
    Target._walk(self, walked, work, predicate)
    for dep in self.dependencies:
        if isinstance(dep, Target) and dep not in walked:
            walked.add(dep)
            if not predicate or predicate(dep):
                additional_targets = work(dep)
                dep._walk(walked, work, predicate)
                if additional_targets:
                    for additional_target in additional_targets:
                        additional_target._walk(walked, work, predicate)
def __init__(self, name, username=None, password=None, exclusives=None):
    """Credentials used to authenticate against a repository.

    :param string name: The name of these credentials.
    :param username: Either a constant username value or else a callable that can fetch one.
    :type username: string or callable
    :param password: Either a constant password value or else a callable that can fetch one.
    :type password: string or callable
    """
    Target.__init__(self, name, exclusives=exclusives)

    def as_thunk(value):
        # Normalize constants and callables to a uniform zero-arg callable.
        return value if callable(value) else (lambda: value)

    self._username = as_thunk(username)
    self._password = as_thunk(password)
def __init__(self, requirement, name=None, repository=None, version_filter=None,
             use_2to3=False, compatibility=None, exclusives=None):
    """A target wrapping a pkg_resources-style python requirement.

    :param requirement: A requirement string, e.g. 'foo==1.0'.
    :param name: Optional target name; defaults to the requirement's project name.
    :param repository: Optional repository to fetch the distribution from.
    :param version_filter: Optional predicate (py, platform) -> bool gating resolution;
      defaults to accepting everything.
    :param use_2to3: Whether to run 2to3 over the sources.
    :param compatibility: Optional interpreter compatibility constraints.
    """
    # TODO(wickman) Allow PythonRequirements to be specified using pip-style vcs or url
    # identifiers, e.g. git+https or just http://...
    self._requirement = Requirement.parse(requirement)
    self._repository = repository
    self._name = name or self._requirement.project_name
    self._use_2to3 = use_2to3
    self._version_filter = version_filter or (lambda py, pl: True)
    # TODO(wickman) Unify this with PythonTarget .compatibility
    self.compatibility = compatibility or ['']
    # Register with the base Target last, once the derived name is known.
    Target.__init__(self, self._name, exclusives=exclusives)
def _parse_addresses(self, spec):
    """Yield the addresses a target spec expands to.

    Specs ending in '::' expand recursively over all BUILD files under the directory;
    specs ending in ':' expand to all targets in the directory's BUILD file; anything
    else is parsed as a single address.

    Fix: renamed the local `dir`, which shadowed the builtin of the same name.
    """
    if spec.endswith('::'):
        spec_dir = self._get_dir(spec[:-len('::')])
        for buildfile in BuildFile.scan_buildfiles(self._root_dir,
                                                   os.path.join(self._root_dir, spec_dir)):
            for address in Target.get_all_addresses(buildfile):
                yield address
    elif spec.endswith(':'):
        spec_dir = self._get_dir(spec[:-len(':')])
        for address in Target.get_all_addresses(BuildFile(self._root_dir, spec_dir)):
            yield address
    else:
        yield Address.parse(self._root_dir, spec)
def configure_target(target):
    """Configure source sets for `target` and return its sibling source owners, if any."""
    # Process each target at most once; `analyzed` is shared closure state.
    if target not in analyzed:
        analyzed.add(target)

        self.has_scala = not self.skip_scala and (self.has_scala or is_scala(target))

        if isinstance(target, JavaLibrary) or isinstance(target, ScalaLibrary):
            # TODO(John Sirois): this does not handle test resources, make test resources 1st class
            # in ant build and punch this through to pants model
            resources = set()
            if target.resources:
                resources.update(target.resources)
            if resources:
                self.resource_extensions.update(Project.extract_resource_extensions(resources))
                configure_source_sets(ExportableJvmLibrary.RESOURCES_BASE_DIR, resources,
                                      is_test=False)

        if target.sources:
            test = is_test(target)
            self.has_tests = self.has_tests or test
            configure_source_sets(target.target_base, target.sources, is_test=test)

        # Other BUILD files may specify sources in the same directory as this target. Those BUILD
        # files might be in parent directories (globs('a/b/*.java')) or even children directories if
        # this target globs children as well. Gather all these candidate BUILD files to test for
        # sources they own that live in the directories this targets sources live in.
        target_dirset = find_source_basedirs(target)
        candidates = Target.get_all_addresses(target.address.buildfile)
        for ancestor in target.address.buildfile.ancestors():
            candidates.update(Target.get_all_addresses(ancestor))
        for sibling in target.address.buildfile.siblings():
            candidates.update(Target.get_all_addresses(sibling))
        for descendant in target.address.buildfile.descendants():
            candidates.update(Target.get_all_addresses(descendant))

        def is_sibling(target):
            # True when `target` owns sources rooted in any of this target's source dirs.
            return source_target(target) and target_dirset.intersection(find_source_basedirs(target))

        return filter(is_sibling, [Target.get(a) for a in candidates if a != target.address])
def _owning_targets(self, path):
    """Yield the non-synthetic targets that own `path`.

    Fix: guard against Target.get returning None before dereferencing
    target.derived_from — previously a stale address raised AttributeError.
    """
    for build_file in self._candidate_owners(path):
        is_build_file = (build_file.full_path == os.path.join(get_buildroot(), path))
        for address in Target.get_all_addresses(build_file):
            target = Target.get(address)
            if not target:
                continue
            # A synthesized target can never own permanent files on disk
            if target != target.derived_from:
                # TODO(John Sirois): tighten up the notion of targets written down in a BUILD
                # by a user vs. targets created by pants at runtime.
                continue
            if is_build_file or ((target.has_sources() or target.has_resources)
                                 and self._owns(target, path)):
                yield target
def resolve(self):
    """De-reference this pants pointer, yielding each target it transitively resolves to.

    :raises TargetDefinitionException: when the address maps to no parsed target.
    """
    resolved = Target.get(self.address)
    if not resolved:
        message = '%s%s' % (self._DEFINITION_ERROR_MSG, self.address)
        raise TargetDefinitionException(self, message)
    for dep in resolved.resolve():
        yield dep
def _find_path(cls, from_target, to_target, log):
    """Breadth-first search from from_target to to_target along dependency edges,
    printing the first path found (or a not-found message)."""
    from_target, to_target = cls._coerce_to_targets(from_target, to_target)

    log.debug('Looking for path from %s to %s' % (from_target.address.reference(),
                                                  to_target.address.reference()))

    # Each queue entry is (path-so-far, indent level for debug logging).
    queue = [([from_target], 0)]
    while True:
        if not queue:
            print('no path found from %s to %s!' % (from_target.address.reference(),
                                                    to_target.address.reference()))
            break

        path, indent = queue.pop(0)
        next_target = path[-1]

        # NOTE(review): examined_targets is class-level state shared across calls —
        # presumably reset elsewhere between searches; confirm against callers.
        if next_target in cls.examined_targets:
            continue
        cls.examined_targets.add(next_target)

        log.debug('%sexamining %s' % (' ' * indent, next_target))

        if next_target == to_target:
            # Found: print the path, one reference per line.
            print('')
            for target in path:
                print('%s' % target.address.reference())
            break

        if hasattr(next_target, 'dependency_addresses'):
            for address in next_target.dependency_addresses:
                dep = Target.get(address)
                queue.append((path + [dep], indent + 1))
def configure_project(self, targets, checkstyle_suppression_files, debug_port):
    """Build a Project over the jvm targets in `targets` and configure its paths.

    Returns (all_targets, project).
    """
    jvm_targets = Target.extract_jvm_targets(targets)
    if self.intransitive:
        jvm_targets = set(self.context.target_roots).intersection(jvm_targets)

    project = Project(self.project_name, self.python, self.skip_java, self.skip_scala,
                      get_buildroot(), checkstyle_suppression_files, debug_port,
                      jvm_targets, not self.intransitive, self.context.new_workunit)

    getlist = self.context.config.getlist
    if self.python:
        project.configure_python(
            getlist('ide', 'python_source_paths', default=[]),
            getlist('ide', 'python_test_paths', default=[]),
            getlist('ide', 'python_lib_paths', default=[]))

    all_targets = project.configure_jvm(
        getlist('ide', 'extra_jvm_source_paths', default=[]),
        getlist('ide', 'extra_jvm_test_paths', default=[]))
    return all_targets, project
def configure_project(self, targets, checkstyle_suppression_files, debug_port):
    """Create and configure an IDE Project for the jvm subset of `targets`.

    Returns a tuple of (all configured targets, the Project).
    """
    jvm_targets = Target.extract_jvm_targets(targets)
    if self.intransitive:
        # Intransitive mode restricts the project to the explicitly requested roots.
        jvm_targets = set(self.context.target_roots).intersection(jvm_targets)

    project = Project(self.project_name, self.python, self.skip_java, self.skip_scala,
                      get_buildroot(), checkstyle_suppression_files, debug_port,
                      jvm_targets, not self.intransitive, self.context.new_workunit)

    config = self.context.config
    if self.python:
        source_paths = config.getlist('ide', 'python_source_paths', default=[])
        test_paths = config.getlist('ide', 'python_test_paths', default=[])
        lib_paths = config.getlist('ide', 'python_lib_paths', default=[])
        project.configure_python(source_paths, test_paths, lib_paths)

    extra_source_paths = config.getlist('ide', 'extra_jvm_source_paths', default=[])
    extra_test_paths = config.getlist('ide', 'extra_jvm_test_paths', default=[])
    all_targets = project.configure_jvm(extra_source_paths, extra_test_paths)
    return all_targets, project
def execute(self, targets):
    """Compile the java targets among `targets`, maintaining depfiles, class products
    and annotation-processor service info files."""
    java_targets = filter(_is_java, targets)
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)

        # Register output dirs on the compatible classpaths of this target's exclusives group.
        egroups = self.context.products.get_data('exclusives_groups')
        group_id = egroups.get_group_key_for_target(java_targets[0])
        for conf in self._confs:
            egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])
            egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])

        with self.invalidated(java_targets, invalidate_dependents=True,
                              partition_size_hint=self._partition_size_hint) as invalidation_check:
            for vt in invalidation_check.invalid_vts_partitioned:
                # Compile, using partitions for efficiency.
                exclusives_classpath = egroups.get_classpath_for_group(group_id)
                self.execute_single_compilation(vt, exclusives_classpath)
                if not self.dry_run:
                    vt.update()

            for vt in invalidation_check.all_vts:
                depfile = self.create_depfile_path(vt.targets)
                if not self.dry_run and os.path.exists(depfile):
                    # Read in the deps created either just now or by a previous run on these targets.
                    deps = Dependencies(self._classes_dir)
                    deps.load(depfile)
                    self._deps.merge(deps)

        if not self.dry_run:
            if self.context.products.isrequired('classes'):
                genmap = self.context.products.get('classes')

                # Map generated classes to the owning targets and sources.
                for target, classes_by_source in self._deps.findclasses(java_targets).items():
                    for source, classes in classes_by_source.items():
                        genmap.add(source, self._classes_dir, classes)
                        genmap.add(target, self._classes_dir, classes)

                # TODO(John Sirois): Map target.resources in the same way
                # 'Map' (rewrite) annotation processor service info files to the owning targets.
                for target in java_targets:
                    if is_apt(target) and target.processors:
                        basedir = os.path.join(self._resources_dir,
                                               Target.maybe_readable_identify([target]))
                        processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                        self.write_processor_info(processor_info_file, target.processors)
                        genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

            # Produce a monolithic apt processor service info file for further compilation rounds
            # and the unit test classpath.
            all_processors = set()
            for target in java_targets:
                if is_apt(target) and target.processors:
                    all_processors.update(target.processors)
            processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
            if os.path.exists(processor_info_file):
                # Merge in processors recorded by previous runs.
                with safe_open(processor_info_file, 'r') as f:
                    for processor in f:
                        all_processors.add(processor.strip())
            self.write_processor_info(processor_info_file, all_processors)
def identify(self, targets):
    """Return (org, name) identifying the target set.

    A single target with a `provides` artifact yields its published coordinates;
    anything else yields a synthetic id in the 'internal' org.
    """
    targets = list(targets)
    if len(targets) == 1 and getattr(targets[0], 'provides', None):
        provides = targets[0].provides
        return provides.org, provides.name
    return 'internal', Target.maybe_readable_identify(targets)
def __init__(self, context):
    """Index every exported target in the build root by its (org, name) artifact."""
    ConsoleTask.__init__(self, context)
    self._print_uptodate = context.options.check_deps_print_uptodate
    self.repos = context.config.getdict('jar-publish', 'repos')

    self._artifacts_to_targets = {}
    for buildfile in BuildFile.scan_buildfiles(get_buildroot()):
        for address in Target.get_all_addresses(buildfile):
            target = Target.get(address)
            if not target.is_exported:
                continue
            provided_jar, _, _ = target.get_artifact_info()
            artifact = (provided_jar.org, provided_jar.name)
            # First exporter of an artifact wins; later duplicates are ignored.
            if artifact not in self._artifacts_to_targets:
                self._artifacts_to_targets[artifact] = target
def _find_path(cls, from_target, to_target, log):
    """BFS over dependency edges from from_target toward to_target; prints the first
    path found, or a message when none exists."""
    from_target, to_target = cls._coerce_to_targets(from_target, to_target)

    log.debug('Looking for path from %s to %s' % (from_target.address.reference(),
                                                  to_target.address.reference()))

    # Queue entries pair the path taken so far with a debug-log indent level.
    queue = [([from_target], 0)]
    while True:
        if not queue:
            print('no path found from %s to %s!' % (from_target.address.reference(),
                                                    to_target.address.reference()))
            break

        path, indent = queue.pop(0)
        next_target = path[-1]

        # Skip targets already examined (class-level memo shared across invocations —
        # NOTE(review): presumably reset by the caller between searches; confirm).
        if next_target in cls.examined_targets:
            continue
        cls.examined_targets.add(next_target)

        log.debug('%sexamining %s' % (' ' * indent, next_target))

        if next_target == to_target:
            print('')
            for target in path:
                print('%s' % target.address.reference())
            break

        if hasattr(next_target, 'dependency_addresses'):
            for address in next_target.dependency_addresses:
                dep = Target.get(address)
                queue.append((path + [dep], indent + 1))
def configure_target(target):
    """Configure source/resource sets for `target`; returns sibling source owners."""
    # Only handle each target once; `analyzed` is shared closure state.
    if target not in analyzed:
        analyzed.add(target)

        self.has_scala = not self.skip_scala and (self.has_scala or is_scala(target))

        if target.has_resources:
            # Group resource sources by their base dir so each gets its own source set.
            resources_by_basedir = defaultdict(set)
            for resources in target.resources:
                resources_by_basedir[resources.target_base].update(resources.sources)
            for basedir, resources in resources_by_basedir.items():
                self.resource_extensions.update(Project.extract_resource_extensions(resources))
                configure_source_sets(basedir, resources, is_test=False)

        if target.sources:
            test = target.is_test
            self.has_tests = self.has_tests or test
            configure_source_sets(target.target_base, target.sources, is_test=test)

        # Other BUILD files may specify sources in the same directory as this target. Those BUILD
        # files might be in parent directories (globs('a/b/*.java')) or even children directories if
        # this target globs children as well. Gather all these candidate BUILD files to test for
        # sources they own that live in the directories this targets sources live in.
        target_dirset = find_source_basedirs(target)
        candidates = Target.get_all_addresses(target.address.buildfile)
        for ancestor in target.address.buildfile.ancestors():
            candidates.update(Target.get_all_addresses(ancestor))
        for sibling in target.address.buildfile.siblings():
            candidates.update(Target.get_all_addresses(sibling))
        for descendant in target.address.buildfile.descendants():
            candidates.update(Target.get_all_addresses(descendant))

        def is_sibling(target):
            # True when `target` owns sources rooted in any of this target's source dirs.
            return source_target(target) and target_dirset.intersection(find_source_basedirs(target))

        return filter(is_sibling, [Target.get(a) for a in candidates if a != target.address])
def extra_products(self, target):
    """For apt targets with processors, emit a processor service-info file product.

    Returns a list of (basedir, [files]) pairs; empty for non-apt targets.
    """
    if not (target.is_apt and target.processors):
        return []
    root = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
    processor_info_file = os.path.join(root, JavaCompile._PROCESSOR_INFO_FILE)
    self._write_processor_info(processor_info_file, target.processors)
    return [(root, [processor_info_file])]
def _addresses(self):
    """Yield addresses of the target roots, or of every target when no roots are set."""
    roots = self.context.target_roots
    if roots:
        for target in roots:
            yield target.address
        return
    # No explicit roots: fall back to scanning every BUILD file under the root dir.
    for buildfile in BuildFile.scan_buildfiles(self._root_dir):
        for address in Target.get_all_addresses(buildfile):
            yield address
def __getattr__(self, name):
    """Fall back to the resolved target for attributes this object does not define."""
    try:
        return Target.__getattribute__(self, name)
    except AttributeError as e:
        try:
            # Delegate the lookup to the de-referenced target (self.get() presumably
            # resolves this pointer — confirm against the class's get()).
            return getattr(self.get(), name)
        except (AttributeError, LookupError):
            # Re-raise the original error so the failure names this object,
            # not the delegate.
            raise e
def _owning_targets(self, path):
    """Yield the non-synthetic targets owning `path`.

    Fix: check `target` for None before touching target.derived_from — the original
    dereferenced a possibly-None result of Target.get and would raise AttributeError.
    """
    for build_file in self._candidate_owners(path):
        is_build_file = (build_file.full_path == os.path.join(get_buildroot(), path))
        for address in Target.get_all_addresses(build_file):
            target = Target.get(address)
            if not target:
                continue
            # A synthesized target can never own permanent files on disk
            if target != target.derived_from:
                # TODO(John Sirois): tighten up the notion of targets written down in a BUILD
                # by a user vs. targets created by pants at runtime.
                continue
            if is_build_file or ((target.has_sources() or target.has_resources)
                                 and self._owns(target, path)):
                yield target
def target(cls, address):
    """Resolves the given target address to a Target object.

    address: The BUILD target address to resolve.

    Returns the corresponding Target, or None if the address does not point to a
    defined Target.
    """
    parsed = Address.parse(cls.build_root, address, is_relative=False)
    return Target.get(parsed)
def console_output(self, _):
    """Yield the addresses of targets that depend on the context's target roots."""
    # Invert the dependency graph: dependency -> set of targets depending on it.
    dependees_by_target = defaultdict(set)
    for buildfile in BuildFile.scan_buildfiles(get_buildroot()):
        for address in Target.get_all_addresses(buildfile):
            for target in Target.get(address).resolve():
                for dependencies in getattr(target, 'dependencies', ()):
                    for dependency in dependencies.resolve():
                        dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        # Closed mode includes the roots themselves.
        for root in roots:
            yield str(root.address)

    for dependant in self.get_dependants(dependees_by_target, roots):
        yield str(dependant.address)
def _artifact_args(self, targets):
    """Returns the artifact paths for the given target set."""
    artifact_id = Target.maybe_readable_identify(targets)
    # Each compilation must output to its own directory, so zinc can then associate
    # those with the appropriate analysis files of previous compilations.
    classes_dir = os.path.join(self._classes_dirs_base, artifact_id)
    analysis_file = '%s.analysis' % os.path.join(self._analysis_files_base, artifact_id)
    return artifact_id, classes_dir, analysis_file
def test_validation(self):
    """JarLibrary accepts a target dependency but rejects dependencies=None."""
    with ParseContext.temp('JarLibraryTest/test_validation'):
        target = Target(name='mybird')
        # Valid construction with a target dependency.
        JarLibrary(name="test", dependencies=target)
        # dependencies=None must raise.
        self.assertRaises(TargetDefinitionException, JarLibrary,
                          name="test1", dependencies=None)
def console_output(self, _):
    """Yield addresses of the targets that depend on the target roots."""
    buildfiles = OrderedSet()
    if self._dependees_type:
        # Limit the scan to the source roots of the requested target types.
        base_paths = OrderedSet()
        for dependees_type in self._dependees_type:
            try:
                # Try to do a fully qualified import 1st for filtering on custom types.
                from_list, module, type_name = dependees_type.rsplit('.', 2)
                __import__('%s.%s' % (from_list, module), fromlist=[from_list])
            except (ImportError, ValueError):
                # Fall back on pants provided target types.
                if hasattr(twitter.pants.base.build_file_context, dependees_type):
                    type_name = getattr(twitter.pants.base.build_file_context, dependees_type)
                else:
                    raise TaskError('Invalid type name: %s' % dependees_type)
            # Find the SourceRoot for the given input type
            base_paths.update(SourceRoot.roots(type_name))
        if not base_paths:
            raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                            '\nPlease define a source root in BUILD file as:' +
                            '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
        for base_path in base_paths:
            buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
    else:
        buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    # Build the inverted dependency map: dependency -> its dependees.
    dependees_by_target = defaultdict(set)
    for buildfile in buildfiles:
        for address in Target.get_all_addresses(buildfile):
            for target in Target.get(address).resolve():
                # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
                # user vs. targets created by pants at runtime.
                target = self.get_concrete_target(target)
                if hasattr(target, 'dependencies'):
                    for dependencies in target.dependencies:
                        for dependency in dependencies.resolve():
                            dependency = self.get_concrete_target(dependency)
                            dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
        # Closed mode also emits the roots themselves.
        for root in roots:
            yield str(root.address)

    for dependant in self.get_dependants(dependees_by_target, roots):
        yield str(dependant.address)
def test_validation(self):
    """InternalTarget validates its name and dependencies arguments."""
    with ParseContext.temp('InternalTargetTest/test_validation'):
        # dependencies=None is allowed.
        InternalTarget(name="valid", dependencies=None)
        # A non-string name must raise.
        self.assertRaises(TargetDefinitionException, InternalTarget,
                          name=1, dependencies=None)
        # A Target dependency is allowed.
        InternalTarget(name="valid2", dependencies=Target(name='mybird'))
        # A non-target dependency must raise.
        self.assertRaises(TargetDefinitionException, InternalTarget,
                          name='valid3', dependencies=1)
def scan_addresses(root_dir, base_path=None):
    """Parses all targets available in BUILD files under base_path and returns their
    addresses.

    If no base_path is specified, root_dir is assumed to be the base_path.
    """
    found = OrderedSet()
    for buildfile in BuildFile.scan_buildfiles(root_dir, base_path):
        found.update(Target.get_all_addresses(buildfile))
    return found
def parse(self, spec):
    """Parses the given target spec into one or more targets.

    Returns a generator of (target, address) pairs; the target is None when the
    address points at no defined target.
    """
    for address in self._parse_addresses(spec):
        yield Target.get(address), address
def _get_target(address):
    """Resolve an address string to a Target, raising TaskError on any failure."""
    try:
        address = Address.parse(get_buildroot(), address, is_relative=False)
    except IOError as e:
        raise TaskError('Failed to parse address: %s: %s' % (address, e))
    target = Target.get(address)
    if not target:
        raise TaskError('Invalid target address: %s' % address)
    return target
def replace_targets(self, target_roots):
    """Replaces all targets in the context with the given roots and their transitive
    dependencies, recomputing the context id."""
    self._target_roots = target_roots
    self._targets = OrderedSet()
    for root in target_roots:
        # add_target presumably folds in each root's transitive deps — see its impl.
        self.add_target(root)
    self.id = Target.identify(self._targets)
def create_output_paths(self, targets):
    """Return (output_dir, depfile, analysis_cache) paths for the given target set.

    Each compilation must output to its own directory, so zinc can then associate
    those with the appropriate analysis caches of previous compilations.
    """
    compilation_id = Target.maybe_readable_identify(targets)
    output_dir = os.path.join(self._classes_dir, compilation_id)
    depfile = '%s.dependencies' % os.path.join(self._depfile_dir, compilation_id)
    analysis_cache = '%s.analysis_cache' % os.path.join(self._analysis_cache_dir, compilation_id)
    return output_dir, depfile, analysis_cache
def extra_products(self, target):
    """Write the annotation-processor service info file for apt targets and return it
    as an extra product; non-apt targets produce nothing."""
    # TODO(John Sirois): Map target.resources in the same way.
    # 'Map' (rewrite) annotation processor service info files to the owning targets.
    products = []
    if target.is_apt and target.processors:
        basedir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
        info_file = os.path.join(basedir, JavaCompile._PROCESSOR_INFO_FILE)
        self._write_processor_info(info_file, target.processors)
        products.append((basedir, [info_file]))
    return products
def _output_paths(self, targets):
    """Returns the full paths to the classes dir, depfile and analysis file for the
    given target set.

    Each compilation outputs to its own directory so zinc can pair it with the
    analysis files of previous compilations.
    """
    compilation_id = Target.maybe_readable_identify(targets)
    return (os.path.join(self._classes_dir_base, compilation_id),
            os.path.join(self._depfiles_base, compilation_id) + '.dependencies',
            os.path.join(self._analysis_files_base, compilation_id) + '.analysis')
def parse_url(spec):
    """Resolve a pants-link spec to (alias, url); non-link specs pass through as-is.

    :raises TaskError: when the link names a nonexistent page target.
    """
    match = MarkdownToHtml.PANTS_LINK.match(spec)
    if not match:
        return spec, spec
    page = Target.get(Address.parse(get_buildroot(), match.group(1)))
    if not page:
        raise TaskError('Invalid link %s' % match.group(1))
    anchor = match.group(2) or ''
    alias, url = url_builder(page, config=get_config(page))
    return alias, url + anchor
def calculate_sources(self, targets):
    """Map each target to the paths of its .java sources.

    Returns (sources_by_target, identifier) where identifier names the target set.

    Fix: the original built `src` as a generator, so `if src:` was always truthy
    (a generator object is never falsy) and the emptiness check did nothing;
    materializing as a list makes the check meaningful and avoids creating empty
    entries in the defaultdict.
    """
    sources = defaultdict(set)

    def collect_sources(target):
        src = [os.path.join(target.target_base, source)
               for source in target.sources if source.endswith('.java')]
        if src:
            sources[target].update(src)

    for target in targets:
        collect_sources(target)
    return sources, Target.identify(targets)