def create_parser(defaults=None):
  """Creates a config parser that supports %([key-name])s value substitution.

  Any defaults supplied will act as if specified in the loaded config file's DEFAULT section
  and be available for substitutions.

  All of the following are seeded with defaults in the config
    user: the current user
    homedir: the current user's home directory
    buildroot: the root of this repo
    pants_bootstrapdir: the global pants scratch space primarily used for caches
    pants_supportdir: pants support files for this repo go here; for example: ivysettings.xml
    pants_distdir: user visible artifacts for this repo go here
    pants_workdir: the scratch space used to for live builds in this repo
  """
  root = get_buildroot()
  seeded = {
    'buildroot': root,
    'homedir': os.path.expanduser('~'),
    'user': getpass.getuser(),
    'pants_bootstrapdir': os.path.expanduser('~/.pants.d'),
    'pants_workdir': os.path.join(root, '.pants.d'),
    'pants_supportdir': os.path.join(root, 'build-support'),
    'pants_distdir': os.path.join(root, 'dist'),
  }
  if defaults:
    seeded.update(defaults)
  return ConfigParser.SafeConfigParser(seeded)
def create_parser(defaults=None):
  """Creates a config parser that supports %([key-name])s value substitution.

  Any defaults supplied will act as if specified in the loaded config file's DEFAULT section
  and be available for substitutions.

  All of the following are seeded with defaults in the config
    user: the current user
    homedir: the current user's home directory
    buildroot: the root of this repo
    pants_bootstrapdir: the global pants scratch space primarily used for caches
    pants_supportdir: pants support files for this repo go here; for example: ivysettings.xml
    pants_distdir: user visible artifacts for this repo go here
    pants_workdir: the scratch space used to for live builds in this repo
  """
  seeded = dict(
      buildroot=get_buildroot(),
      homedir=os.path.expanduser('~'),
      user=getpass.getuser(),
      pants_bootstrapdir=os.path.expanduser('~/.pants.d'),
      pants_workdir=os.path.join(get_buildroot(), '.pants.d'),
      pants_supportdir=os.path.join(get_buildroot(), 'build-support'),
      pants_distdir=os.path.join(get_buildroot(), 'dist'))
  # Caller-supplied defaults win over the seeded values.
  seeded.update(defaults or {})
  return ConfigParser.SafeConfigParser(seeded)
def configure_python(self, source_roots, test_roots, lib_roots):
  """Registers python source roots, test roots and discovered library dirs/eggs."""
  buildroot = get_buildroot()
  for src_root in source_roots:
    self.py_sources.append(SourceSet(buildroot, src_root, None, False))
  for tst_root in test_roots:
    self.py_sources.append(SourceSet(buildroot, tst_root, None, True))
  for lib_root in lib_roots:
    lib_dir = os.path.join(buildroot, lib_root)
    for entry in os.listdir(lib_dir):
      # A library is either a directory or a packaged .egg file.
      if os.path.isdir(os.path.join(lib_dir, entry)) or entry.endswith('.egg'):
        self.py_libs.append(SourceSet(buildroot, lib_root, entry, False))
def test_multiple_source_root(self):
  """Registering two roots with distinct types lists both root: type pairs."""
  with register_sourceroot() as sourceroot:
    for name, target_type in (('fakerootA', ListRootsTest.TypeA),
                              ('fakerootB', ListRootsTest.TypeB)):
      sourceroot(os.path.join(get_buildroot(), name), target_type)
    self.assert_console_output('fakerootA: TypeA', 'fakerootB: TypeB')
def _coerce_to_targets(cls, from_str, to_str):
  """Coerces a (from, to) pair of address specs or Targets into resolved Targets.

  Both arguments must be of the same kind: either both address-spec strings or both
  already-resolved Targets.

  :raises TaskError: if the kinds are mixed or either spec fails to resolve.
  """
  if isinstance(from_str, Compatibility.string):
    if not isinstance(to_str, Compatibility.string):
      raise TaskError('Finding paths from string %s to non-string %s' % (from_str, str(to_str)))
    from_address = Address.parse(get_buildroot(), from_str)
    to_address = Address.parse(get_buildroot(), to_str)
    from_target = Target.get(from_address)
    to_target = Target.get(to_address)
    if not from_target:
      raise TaskError('Target %s doesn\'t exist' % from_address.reference())
    if not to_target:
      raise TaskError('Target %s doesn\'t exist' % to_address.reference())
    return from_target, to_target
  elif isinstance(to_str, Compatibility.string):
    # Bug fix: in this branch from_str is the non-string and to_str the string, but the
    # original message described them the other way around.
    raise TaskError('Finding paths from non-string %s to string %s' % (str(from_str), to_str))
  return from_str, to_str
def console_output(self, _):
  """Yields the addresses of targets that depend on the context's root targets.

  When --dependees-type is given, only BUILD files under the source roots of those
  types are scanned; otherwise every BUILD file in the repo is scanned.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      try:
        # Try to do a fully qualified import 1st for filtering on custom types.
        from_list, module, type_name = dependees_type.rsplit('.', 2)
        __import__('%s.%s' % (from_list, module), fromlist=[from_list])
      except (ImportError, ValueError):
        # Fall back on pants provided target types.
        if hasattr(twitter.pants.base.build_file_context, dependees_type):
          type_name = getattr(twitter.pants.base.build_file_context, dependees_type)
        else:
          raise TaskError('Invalid type name: %s' % dependees_type)
      # Find the SourceRoot for the given input type
      base_paths.update(SourceRoot.roots(type_name))
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot())

  # Invert the dependency edges: map each target to the set of targets depending on it.
  dependees_by_target = defaultdict(set)
  for buildfile in buildfiles:
    for address in Target.get_all_addresses(buildfile):
      for target in Target.get(address).resolve():
        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        if hasattr(target, 'dependencies'):
          for dependencies in target.dependencies:
            for dependency in dependencies.resolve():
              dependency = self.get_concrete_target(dependency)
              dependees_by_target[dependency].add(target)

  roots = set(self.context.target_roots)
  if self._closed:
    # --closed: include the root targets themselves in the output.
    for root in roots:
      yield str(root.address)
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield str(dependant.address)
def _tempname(self):
  """Creates and returns the path of a fresh, empty temp file under the pants workdir.

  The file lives under <pants_workdir>/tmp rather than a cwd-relative location because
  the user's cwd is not assumed to be the build root.
  """
  # don't assume the user's cwd is buildroot
  buildroot = get_buildroot()
  # Bug fix: `buildroot` was previously assigned but unused (get_buildroot() was
  # called a second time for the fallback); reuse the local instead.
  fallback = os.path.join(buildroot, '.pants.d')
  pants_workdir = self.context.config.getdefault('pants_workdir', default=fallback)
  tmp_dir = os.path.join(pants_workdir, 'tmp')
  safe_mkdir(tmp_dir)
  fd, path = tempfile.mkstemp(dir=tmp_dir, prefix='')
  os.close(fd)  # mkstemp opens the file; we only need its name.
  return path
def configure_python(self, source_roots, test_roots, lib_roots):
  """Adds python source sets, test source sets and discovered libs to this project."""
  self.py_sources.extend(
      SourceSet(get_buildroot(), r, None, False) for r in source_roots)
  self.py_sources.extend(
      SourceSet(get_buildroot(), r, None, True) for r in test_roots)
  for lib_root in lib_roots:
    root_dir = os.path.join(get_buildroot(), lib_root)
    for entry in os.listdir(root_dir):
      # Libraries appear either as directories or as .egg files.
      if entry.endswith('.egg') or os.path.isdir(os.path.join(root_dir, entry)):
        self.py_libs.append(SourceSet(get_buildroot(), lib_root, entry, False))
def sourcejar(self, jvm_targets, add_genjar):
  """Creates a -sources.jar per jvm target containing its sources and resources."""
  buildroot = get_buildroot()
  for target in jvm_targets:
    sources_jar = jarname(target, '-sources.jar')
    add_genjar(target, sources_jar)
    destination = os.path.join(self._output_dir, sources_jar)
    with self.create_jar(target, destination) as jar:
      for src in target.sources:
        jar.write(os.path.join(buildroot, target.target_base, src), src)
      if target.has_resources:
        for resource_set in target.resources:
          for res in resource_set.sources:
            jar.write(os.path.join(buildroot, resource_set.target_base, res), res)
def _run_thrift(self, source, bases):
  """Runs the thrift compiler over a single root thrift file.

  :param source: path of the thrift file, relative to self.root.
  :param bases: include-path base dirs, relative to the build root.
  :returns: True if the thrift compiler exited 0.
  """
  thrift_abs_path = os.path.abspath(os.path.join(self.root, source))

  args = [
    select_thrift_binary(self.config),
    '--gen',
    'py:new_style',
    '-recurse',
    '-o', self.codegen_root,
  ]

  # Add bases as include paths to try. Note that include paths and compile targets
  # should be uniformly relative, or uniformly absolute (in this case the latter).
  for base in bases:
    args.extend(('-I', os.path.join(get_buildroot(), base)))
  args.append(thrift_abs_path)

  # Bug fix: the original called po.communicate() after po.wait() without piping
  # stdout/stderr, so the failure diagnostics always printed None. Capture the
  # streams so they can actually be reported on failure.
  po = subprocess.Popen(args, cwd=self.chroot.path(),
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  out, err = po.communicate()
  if po.returncode != 0:
    print('thrift generation failed!', file=sys.stderr)
    print('STDOUT', file=sys.stderr)
    print(out, file=sys.stderr)
    print('STDERR', file=sys.stderr)
    print(err, file=sys.stderr)
  return po.returncode == 0
def run_server(reporting_queue):
  """Forks a grandchild process that runs the reporting web server.

  Progress is reported back on reporting_queue: a launch message with pid/port, then
  the DONE sentinel once the server is up (or on socket error).

  NOTE(review): references `self` from an enclosing scope, so this is presumably
  defined inside a method body -- confirm against the full file.
  """
  def report_launch(actual_port):
    reporting_queue.put(
      'Launching server with pid %d at http://localhost:%d' % (os.getpid(), actual_port))

  def done_reporting():
    reporting_queue.put(DONE)

  try:
    # We mustn't block in the child, because the multiprocessing module enforces that the
    # parent either kills or joins to it. Instead we fork a grandchild that inherits the queue
    # but is allowed to block indefinitely on the server loop.
    if not os.fork():
      # Child process.
      info_dir = RunInfo.dir(self.context.config)
      # If these are specified explicitly in the config, use those. Otherwise
      # they will be None, and we'll use the ones baked into this package.
      template_dir = self.context.config.get('reporting', 'reports_template_dir')
      assets_dir = self.context.config.get('reporting', 'reports_assets_dir')
      settings = ReportingServer.Settings(info_dir=info_dir, template_dir=template_dir,
                                          assets_dir=assets_dir, root=get_buildroot(),
                                          allowed_clients=self.context.options.allowed_clients)
      server = ReportingServer(self.context.options.port, settings)
      actual_port = server.server_port()
      ReportingServerManager.save_current_server_port(actual_port)
      report_launch(actual_port)
      done_reporting()
      # Block forever here.
      server.start()
  except socket.error:
    done_reporting()
    raise
def __init__(self, context, **kwargs):
  """Initializes the ListTargets task, caching its option values and the build root."""
  super(ListTargets, self).__init__(context, **kwargs)

  # Snapshot the relevant list-targets options from the parsed command line.
  self._provides = context.options.list_provides
  self._provides_columns = context.options.list_provides_columns
  self._documented = context.options.list_documented
  self._root_dir = get_buildroot()
def make_build_properties(cls):
  """Returns a dict of build properties.

  Always includes the interpreter class/version and platform. When the pants build
  environment is importable, also includes scm revision/branch/tag, build timestamps,
  the invoking user, hostname and the buildroot path.
  """
  pi = PythonInterpreter()
  base_info = {
    'class': pi.identity().interpreter,
    'version': pi.identity().version,
    'platform': get_platform(),
  }
  try:
    # Imported lazily: this module is usable outside a pants repo, where the
    # build environment is unavailable.
    from twitter.pants.base.build_environment import get_buildroot, get_scm
    buildroot = get_buildroot()
    scm = get_scm()

    now = localtime()
    if scm:
      revision = scm.commit_id
      tag = scm.tag_name or 'none'
      branchname = scm.branch_name or revision
    else:
      revision = 'unknown'
      tag = 'none'
      branchname = 'unknown'
    base_info.update({
      'date': strftime('%A %b %d, %Y', now),
      'time': strftime('%H:%M:%S', now),
      'timestamp': strftime('%m.%d.%Y %H:%M', now),
      'branch': branchname,
      'tag': tag,
      'sha': revision,
      'user': getpass.getuser(),
      'machine': socket.gethostname(),
      'path': buildroot
    })
  except ImportError:
    # Best effort: the scm/build-environment properties are optional.
    pass
  return base_info
def _register(cls, source_root_dir, *allowed_target_types): """Registers a source root. :source_root_dir The source root directory against which we resolve source paths, relative to the build root. :allowed_target_types Optional list of target types. If specified, we enforce that only targets of those types appear under this source root. """ # Verify that source_root_dir doesn't reach outside buildroot. buildroot = get_buildroot() if source_root_dir.startswith(buildroot): abspath = os.path.normpath(source_root_dir) else: abspath = os.path.normpath(os.path.join(buildroot, source_root_dir)) if not abspath.startswith(buildroot): raise ValueError('Source root %s is not under the build root %s' % (abspath, buildroot)) source_root_dir = os.path.relpath(abspath, buildroot) types = cls._TYPES_BY_ROOT.get(source_root_dir) if types is None: types = OrderedSet() cls._TYPES_BY_ROOT[source_root_dir] = types for allowed_target_type in allowed_target_types: types.add(allowed_target_type) roots = cls._ROOTS_BY_TYPE.get(allowed_target_type) if roots is None: roots = OrderedSet() cls._ROOTS_BY_TYPE[allowed_target_type] = roots roots.add(source_root_dir)
def split(self, splits, catchall=False):
  """Partitions this analysis into one analysis per split.

  When catchall is True, an extra trailing analysis collects entries whose source
  matched no split; otherwise unmatched entries are dropped.
  """
  buildroot = get_buildroot()
  idx_for_src = {}
  for idx, srcs in enumerate(splits):
    for src in srcs:
      abs_src = src if os.path.isabs(src) else os.path.join(buildroot, src)
      idx_for_src[abs_src] = idx

  catchall_idx = len(splits) if catchall else -1
  num_outputs = len(splits) + (1 if catchall else 0)
  split_pcd_entries = [[] for _ in xrange(num_outputs)]
  split_src_to_deps = [{} for _ in xrange(num_outputs)]

  for pcd_entry in self.pcd_entries:
    idx = idx_for_src.get(pcd_entry[1], catchall_idx)
    if idx != -1:
      split_pcd_entries[idx].append(pcd_entry)
  for src, deps in self.src_to_deps.items():
    idx = idx_for_src.get(src, catchall_idx)
    if idx != -1:
      split_src_to_deps[idx][src] = deps

  return [JMakeAnalysis(pcds, deps)
          for pcds, deps in zip(split_pcd_entries, split_src_to_deps)]
def __init__(self, configparser): self.configparser = configparser # Overrides # # This feature allows a second configuration file which will override # pants.ini to be specified. The file is currently specified via an env # variable because the cmd line flags are parsed after config is loaded. # # The main use of the extra file is to have different settings based on # the environment. For example, the setting used to compile or locations # of caches might be different between a developer's local environment # and the environment used to build and publish artifacts (e.g. Jenkins) # # The files cannot reference each other's values, so make sure each one is # internally consistent self.overrides_path = os.environ.get('PANTS_CONFIG_OVERRIDE') self.overrides_parser = None if self.overrides_path is not None: self.overrides_path = os.path.join(get_buildroot(), self.overrides_path) self.overrides_parser = Config.create_parser() with open(self.overrides_path) as o_ini: self.overrides_parser.readfp(o_ini, filename=self.overrides_path)
def parse_jarcoordinate(coordinate):
  """Resolves a jar coordinate to an (org, name) pair.

  Accepts either an explicit 'org#name' coordinate or a target address whose target
  is exported (carries a provides clause).

  :raises TaskError: on unresolvable addresses, unparseable BUILD files, sibling
    suggestions for a missing target, or a non-exported target.
  """
  components = coordinate.split('#', 1)
  if len(components) == 2:
    org, name = components
    return org, name
  else:
    try:
      address = Address.parse(get_buildroot(), coordinate)
      try:
        target = Target.get(address)
        if not target:
          # Suggest sibling targets from the same BUILD file.
          siblings = Target.get_all_addresses(address.buildfile)
          prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
          raise TaskError('%s => %s?:\n %s' % (address, prompt,
                                               '\n '.join(str(a) for a in siblings)))
        if not target.is_exported:
          raise TaskError('%s is not an exported target' % coordinate)
        return target.provides.org, target.provides.name
      except (ImportError, SyntaxError, TypeError):
        raise TaskError('Failed to parse %s' % address.buildfile.relpath)
    except IOError:
      raise TaskError('No BUILD file could be found at %s' % coordinate)
def find_plugins(self, plugin_names):
  """Returns a map from plugin name to plugin jar.

  Scans every jar in self.plugin_jars() for a scalac plugin descriptor and selects
  the ones whose name is in plugin_names.

  :raises TaskError: on a malformed descriptor, a plugin defined in two jars, or any
    requested plugin that cannot be found.
  """
  plugin_names = set(plugin_names)
  plugins = {}
  buildroot = get_buildroot()
  # plugin_jars is the universe of all possible plugins and their transitive deps.
  # Here we select the ones to actually use.
  for jar in self.plugin_jars():
    with open_jar(jar, 'r') as jarfile:
      try:
        with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
          plugin_info = ElementTree.parse(plugin_info_file).getroot()
          if plugin_info.tag != 'plugin':
            raise TaskError('File %s in %s is not a valid scalac plugin descriptor' %
                            (_PLUGIN_INFO_FILE, jar))
          name = plugin_info.find('name').text
          if name in plugin_names:
            if name in plugins:
              raise TaskError('Plugin %s defined in %s and in %s' % (name, plugins[name], jar))
            # It's important to use relative paths, as the compiler flags get embedded in the zinc
            # analysis file, and we port those between systems via the artifact cache.
            plugins[name] = os.path.relpath(jar, buildroot)
      except KeyError:
        # Jar has no descriptor entry: it's a dependency, not a plugin.
        pass
  unresolved_plugins = plugin_names - set(plugins.keys())
  if unresolved_plugins:
    raise TaskError('Could not find requested plugins: %s' % list(unresolved_plugins))
  return plugins
def __init__(self, context, nailgun_task, jvm_args, color, bootstrap_utils):
  """Sets up zinc-based scala compilation.

  Registers bootstrap tools for the target scala compiler, for zinc itself, and for
  any configured scalac plugins, and caches config values used per compile.
  """
  self.context = context
  self._nailgun_task = nailgun_task  # We run zinc on this task's behalf.
  self._jvm_args = jvm_args
  self._color = color
  self._bootstrap_utils = bootstrap_utils

  self._pants_home = get_buildroot()

  # The target scala version.
  self._compile_bootstrap_key = 'scalac'
  compile_bootstrap_tools = context.config.getlist('scala-compile',
                                                   'compile-bootstrap-tools',
                                                   default=[':scala-compile-2.9.3'])
  self._bootstrap_utils.register_jvm_build_tools(self._compile_bootstrap_key,
                                                 compile_bootstrap_tools)

  # The zinc version (and the scala version it needs, which may differ from the target version).
  self._zinc_bootstrap_key = 'zinc'
  zinc_bootstrap_tools = context.config.getlist('scala-compile', 'zinc-bootstrap-tools',
                                                default=[':zinc'])
  self._bootstrap_utils.register_jvm_build_tools(self._zinc_bootstrap_key, zinc_bootstrap_tools)

  # Compiler plugins.
  plugins_bootstrap_tools = context.config.getlist('scala-compile',
                                                   'scalac-plugin-bootstrap-tools',
                                                   default=[])
  if plugins_bootstrap_tools:
    self._plugins_bootstrap_key = 'plugins'
    self._bootstrap_utils.register_jvm_build_tools(self._plugins_bootstrap_key,
                                                   plugins_bootstrap_tools)
  else:
    self._plugins_bootstrap_key = None

  self._main = context.config.get('scala-compile', 'main')

  # For localizing/relativizing analysis files.
  self._java_home = context.java_home
  self._ivy_home = context.ivy_home
def _owning_targets(self, path):
  """Yields each target whose BUILD file is the given path or whose sources own it."""
  abs_path = os.path.join(get_buildroot(), path)
  for build_file in self._candidate_owners(path):
    path_is_this_build_file = build_file.full_path == abs_path
    for address in Target.get_all_addresses(build_file):
      target = Target.get(address)
      if not target:
        continue
      if path_is_this_build_file or (target.has_sources() and self._owns(target, path)):
        yield target
def configure_project(self, targets, checkstyle_suppression_files, debug_port):
  """Builds the IDE Project model for the given targets.

  :returns: (all_targets, project) where all_targets is the result of configuring the
    jvm sources (plus any extra configured source/test paths).
  """
  jvm_targets = Target.extract_jvm_targets(targets)
  if self.intransitive:
    # Restrict to the explicitly requested roots.
    jvm_targets = set(self.context.target_roots).intersection(jvm_targets)
  project = Project(self.project_name, self.python, self.skip_java, self.skip_scala,
                    get_buildroot(), checkstyle_suppression_files, debug_port, jvm_targets,
                    not self.intransitive, self.context.new_workunit)

  if self.python:
    python_source_paths = self.context.config.getlist('ide', 'python_source_paths', default=[])
    python_test_paths = self.context.config.getlist('ide', 'python_test_paths', default=[])
    python_lib_paths = self.context.config.getlist('ide', 'python_lib_paths', default=[])
    project.configure_python(python_source_paths, python_test_paths, python_lib_paths)

  extra_source_paths = self.context.config.getlist('ide', 'extra_jvm_source_paths', default=[])
  extra_test_paths = self.context.config.getlist('ide', 'extra_jvm_test_paths', default=[])
  all_targets = project.configure_jvm(extra_source_paths, extra_test_paths)
  return all_targets, project
def split(self, splits, catchall=False):
  """Splits this analysis into per-split analyses.

  With catchall=True a trailing bucket collects sources matching no split; without it
  unmatched sources are discarded.
  """
  buildroot = get_buildroot()
  bucket_by_src = {}
  for bucket, split_srcs in enumerate(splits):
    for src in split_srcs:
      key = src if os.path.isabs(src) else os.path.join(buildroot, src)
      bucket_by_src[key] = bucket

  catchall_idx = len(splits) if catchall else -1
  total = len(splits) + (1 if catchall else 0)
  pcd_buckets = [[] for _ in xrange(total)]
  deps_buckets = [{} for _ in xrange(total)]

  for entry in self.pcd_entries:
    bucket = bucket_by_src.get(entry[1], catchall_idx)
    if bucket != -1:
      pcd_buckets[bucket].append(entry)
  for src, deps in self.src_to_deps.items():
    bucket = bucket_by_src.get(src, catchall_idx)
    if bucket != -1:
      deps_buckets[bucket][src] = deps

  return [JMakeAnalysis(p, d) for p, d in zip(pcd_buckets, deps_buckets)]
def configure_project(self, targets, checkstyle_suppression_files, debug_port):
  """Builds the IDE Project model for the given targets.

  :returns: (all_targets, project) where all_targets is the result of configuring the
    jvm sources (plus any extra configured source/test paths).
  """
  jvm_targets = Target.extract_jvm_targets(targets)
  if self.intransitive:
    # Restrict to the explicitly requested roots.
    jvm_targets = set(self.context.target_roots).intersection(jvm_targets)
  project = Project(self.project_name, self.python, self.skip_java, self.skip_scala,
                    get_buildroot(), checkstyle_suppression_files, debug_port, jvm_targets,
                    not self.intransitive, self.context.new_workunit)

  if self.python:
    python_source_paths = self.context.config.getlist('ide', 'python_source_paths', default=[])
    python_test_paths = self.context.config.getlist('ide', 'python_test_paths', default=[])
    python_lib_paths = self.context.config.getlist('ide', 'python_lib_paths', default=[])
    project.configure_python(python_source_paths, python_test_paths, python_lib_paths)

  extra_source_paths = self.context.config.getlist('ide', 'extra_jvm_source_paths', default=[])
  extra_test_paths = self.context.config.getlist('ide', 'extra_jvm_test_paths', default=[])
  all_targets = project.configure_jvm(extra_source_paths, extra_test_paths)
  return all_targets, project
def create_binary(self, binary):
  """Writes the executable jar for a jvm binary target.

  Bundles the target's generated internal jars, external dependency jars when
  deployjar is set, and a manifest carrying the Main-Class entry.
  """
  import platform
  safe_mkdir(self.outdir)

  jarmap = self.context.products.get('jars')

  binary_jarname = '%s.jar' % binary.basename
  binaryjarpath = os.path.join(self.outdir, binary_jarname)
  self.context.log.info('creating %s' % os.path.relpath(binaryjarpath, get_buildroot()))

  with open_jar(binaryjarpath, 'w', compression=self.compression, allowZip64=self.zip64) as jar:
    def add_jars(target):
      # Dump every jar generated for this internal target into the binary jar.
      generated = jarmap.get(target)
      if generated:
        for basedir, jars in generated.items():
          for internaljar in jars:
            self.dump(os.path.join(basedir, internaljar), jar)

    binary.walk(add_jars, lambda t: t.is_internal)

    if self.deployjar:
      for basedir, externaljar in self.list_jar_dependencies(binary):
        self.dump(os.path.join(basedir, externaljar), jar)

    manifest = Manifest()
    manifest.addentry(Manifest.MANIFEST_VERSION, '1.0')
    manifest.addentry(
      Manifest.CREATED_BY,
      'python %s pants %s (Twitter, Inc.)' % (platform.python_version(), get_version())
    )
    main = binary.main or '*** java -jar not supported, please use -cp and pick a main ***'
    manifest.addentry(Manifest.MAIN_CLASS, main)
    jar.writestr(Manifest.PATH, manifest.contents())

  jarmap.add(binary, self.outdir, [binary_jarname])
def find(target):
  """Finds the source root for the given target.

  If none is registered, the parent directory of the target's BUILD file is returned.
  """
  target_path = os.path.relpath(target.address.buildfile.parent_path, get_buildroot())

  def _find():
    # Walk the target's MRO so subclasses match roots registered for their bases.
    for typ in target.__class__.mro():
      for root in SourceRoot._ROOTS_BY_TYPE.get(typ, ()):
        if target_path.startswith(root):
          return root

  # Try already registered roots
  root = _find()
  if root:
    return root

  # Fall back to searching the ancestor path for a root
  for buildfile in reversed(target.address.buildfile.ancestors()):
    if buildfile not in SourceRoot._SEARCHED:
      # Remember the BUILD files we've parsed so repeated lookups don't re-parse.
      SourceRoot._SEARCHED.add(buildfile)
      ParseContext(buildfile).parse()
      root = _find()
      if root:
        return root

  # Finally, resolve files relative to the BUILD file parent dir as the target base
  return target_path
def __init__(self, configparser, configpath):
  """Creates a Config by reading the ini file at configpath into the given parser,
  layering on an optional PANTS_CONFIG_OVERRIDE overrides file.
  """
  # Base Config
  self.configparser = configparser
  with open(configpath) as ini:
    self.configparser.readfp(ini, filename=configpath)
  self.file = configpath

  # Overrides
  #
  # This feature allows a second configuration file which will override
  # pants.ini to be specified. The file is currently specified via an env
  # variable because the cmd line flags are parsed after config is loaded.
  #
  # The main use of the extra file is to have different settings based on
  # the environment. For example, the setting used to compile or locations
  # of caches might be different between a developer's local environment
  # and the environment used to build and publish artifacts (e.g. Jenkins)
  #
  # The files cannot reference each other's values, so make sure each one is
  # internally consistent
  self.overrides_path = os.environ.get('PANTS_CONFIG_OVERRIDE')
  self.overrides_parser = None
  if self.overrides_path is not None:
    # Relative override paths are resolved against the build root.
    self.overrides_path = os.path.join(get_buildroot(), self.overrides_path)
    self.overrides_parser = Config.create_parser()
    with open(self.overrides_path) as o_ini:
      self.overrides_parser.readfp(o_ini, filename=self.overrides_path)
def console_output(self, _):
  """Yields the addresses of targets that depend on the context's root targets.

  When --dependees-type is given, only BUILD files under the source roots of those
  types are scanned; otherwise every BUILD file in the repo is scanned.
  """
  buildfiles = OrderedSet()
  if self._dependees_type:
    base_paths = OrderedSet()
    for dependees_type in self._dependees_type:
      try:
        # Try to do a fully qualified import 1st for filtering on custom types.
        from_list, module, type_name = dependees_type.rsplit('.', 2)
        __import__('%s.%s' % (from_list, module), fromlist=[from_list])
      except (ImportError, ValueError):
        # Fall back on pants provided target types.
        if hasattr(twitter.pants.base.build_file_context, dependees_type):
          type_name = getattr(twitter.pants.base.build_file_context, dependees_type)
        else:
          raise TaskError('Invalid type name: %s' % dependees_type)
      # Find the SourceRoot for the given input type
      base_paths.update(SourceRoot.roots(type_name))
    if not base_paths:
      raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                      '\nPlease define a source root in BUILD file as:' +
                      '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
    for base_path in base_paths:
      buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(), base_path))
  else:
    buildfiles = BuildFile.scan_buildfiles(get_buildroot())

  # Invert the dependency edges: map each target to the set of targets depending on it.
  dependees_by_target = defaultdict(set)
  for buildfile in buildfiles:
    for address in Target.get_all_addresses(buildfile):
      for target in Target.get(address).resolve():
        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        if hasattr(target, 'dependencies'):
          for dependencies in target.dependencies:
            for dependency in dependencies.resolve():
              dependency = self.get_concrete_target(dependency)
              dependees_by_target[dependency].add(target)

  roots = set(self.context.target_roots)
  if self._closed:
    # --closed: include the root targets themselves in the output.
    for root in roots:
      yield str(root.address)
  for dependant in self.get_dependants(dependees_by_target, roots):
    yield str(dependant.address)
def outdir(self):
  """Returns the gen output dir, relative to cwd: the CLI option wins, then config,
  then a default under the pants workdir.
  """
  default_workdir = self.context.config.getdefault(
      'pants_workdir', default=os.path.join(get_buildroot(), '.pants.d'))
  config_fallback = os.path.join(default_workdir, self.name)
  chosen = self.context.options.scrooge_gen_create_outdir
  if not chosen:
    chosen = self.context.config.get(self.config_section, 'workdir', default=config_fallback)
  return os.path.relpath(chosen)
def add_resource_paths(predicate):
  """Appends each matching target's sibling 'resources' dir to the classpath,
  de-duplicated by target base.
  """
  seen_bases = set()
  for target in self.context.targets():
    if not predicate(target):
      continue
    base = target.target_base
    if base in seen_bases:
      continue
    seen_bases.add(base)
    sibling_resources = os.path.join(os.path.dirname(base), 'resources')
    classpath.append(os.path.join(get_buildroot(), sibling_resources))
def sources_absolute_paths(self):
  """Returns the absolute paths of this target's sources.

  Prefer this over .sources unless you need to know about the target_base.
  """
  absolute_base = os.path.join(get_buildroot(), self.target_base)
  for source in self.sources:
    yield os.path.join(absolute_base, source)
def create_parser(defaults=None):
  """Creates a config parser that supports %([key-name])s value substitution.

  Any defaults supplied will act as if specified in the loaded config file's DEFAULT
  section and be available for substitutions. The 'buildroot', invoking 'user' and
  invoking user's 'homedir' are automatically defaulted.
  """
  root = get_buildroot()
  seeded = dict(
      buildroot=root,
      homedir=os.path.expanduser('~'),
      user=getpass.getuser(),
      pants_workdir=os.path.join(root, '.pants.d'),
      pants_supportdir=os.path.join(root, 'build-support'),
      pants_distdir=os.path.join(root, 'dist'))
  # Caller-supplied defaults win over the seeded values.
  seeded.update(defaults or {})
  return ConfigParser.SafeConfigParser(seeded)
def load(configpath=None, defaults=None):
  """Loads a Config from the given path, by default the path to the pants.ini file in
  the current build root directory.

  Any defaults supplied will act as if specified in the loaded config file's DEFAULT
  section. The 'buildroot', invoking 'user' and invoking user's 'homedir' are
  automatically defaulted.
  """
  # Bug fix: the default was previously computed in the def line, pinning the path at
  # module-import time. Resolve it lazily so get_buildroot() reflects the environment
  # at call time; callers passing an explicit path are unaffected.
  if configpath is None:
    configpath = os.path.join(get_buildroot(), 'pants.ini')
  return Config(Config.create_parser(defaults), configpath)
def _candidate_owners(self, path):
  """Yields BUILD files that could own path: the one in its own dir (if it exists),
  then that file's siblings, then its ancestors.
  """
  owner = BuildFile(get_buildroot(), relpath=os.path.dirname(path), must_exist=False)
  if owner.exists():
    yield owner
  for sibling in owner.siblings():
    yield sibling
  for ancestor in owner.ancestors():
    yield ancestor
def __init__(self, basedir, *types, **kwargs):
  """Initializes a source root at basedir for the given target types.

  :basedir The base directory to resolve sources relative to
  :types The target types to register :basedir: as a source root for
  """
  # reldir lets callers anchor a relative basedir somewhere other than the build root.
  reldir = kwargs.pop('reldir', get_buildroot())
  basepath = os.path.abspath(os.path.join(reldir, basedir))
  # NOTE(review): commonprefix is a character-wise prefix test, so a sibling such as
  # '/repo-other' shares a prefix with '/repo' -- confirm inputs are normalized if
  # that matters here.
  if get_buildroot() != os.path.commonprefix((basepath, get_buildroot())):
    raise ValueError('The supplied basedir %s is not a sub-path of the project root %s' % (
      basepath,
      get_buildroot()
    ))
  # Store the root buildroot-relative.
  self.basedir = os.path.relpath(basepath, get_buildroot())
  self.types = types
  SourceRoot._register(self)
def _cautious_rmtree(root):
  """Recursively deletes root, refusing to delete anything outside the build root.

  Both paths are resolved through realpath so symlinks cannot escape the check.

  :raises TaskError: if root does not resolve to a path under the build root.
  """
  real_buildroot = os.path.realpath(os.path.abspath(get_buildroot()))
  real_root = os.path.realpath(os.path.abspath(root))
  if not real_root.startswith(real_buildroot):
    # Bug fix: the %s placeholder was never interpolated, so the offending path was
    # missing from the error message.
    raise TaskError('DANGER: Attempting to delete %s, which is not under the build root!'
                    % real_root)
  safe_rmtree(real_root)
def _run():
  """Pants main entry point.

  Handles --version/help short-circuits, parses the command, sets up run tracking and
  reporting, serializes via a lock file when the command requires it, runs the
  command, and finally ends tracking and (optionally) kills nailguns.
  """
  version = get_version()
  if len(sys.argv) == 2 and sys.argv[1] == _VERSION_OPTION:
    _do_exit(version)

  root_dir = get_buildroot()
  if not os.path.exists(root_dir):
    _exit_and_fail('PANTS_BUILD_ROOT does not point to a valid path: %s' % root_dir)

  if len(sys.argv) < 2 or (len(sys.argv) == 2 and sys.argv[1] in _HELP_ALIASES):
    _help(version, root_dir)

  command_class, command_args = _parse_command(root_dir, sys.argv[1:])

  parser = optparse.OptionParser(version=version)
  RcFile.install_disable_rc_option(parser)
  parser.add_option(_LOG_EXIT_OPTION,
                    action='store_true',
                    default=False,
                    dest='log_exit',
                    help = 'Log an exit message on success or failure.')

  config = Config.load()

  run_tracker = RunTracker(config)
  report = initial_reporting(config, run_tracker)
  run_tracker.start(report)

  url = run_tracker.run_info.get_info('report_url')
  if url:
    run_tracker.log(Report.INFO, 'See a report at: %s' % url)
  else:
    run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

  command = command_class(run_tracker, root_dir, parser, command_args)
  try:
    if command.serialized():
      # Serialize concurrent pants runs via a lock file in the build root.
      def onwait(pid):
        print('Waiting on pants process %s to complete' % _process_info(pid), file=sys.stderr)
        return True
      runfile = os.path.join(root_dir, '.pants.run')
      lock = Lock.acquire(runfile, onwait=onwait)
    else:
      lock = Lock.unlocked()
    try:
      result = command.run(lock)
      _do_exit(result)
    except KeyboardInterrupt:
      command.cleanup()
      raise
    finally:
      lock.release()
  finally:
    run_tracker.end()
    # Must kill nailguns only after run_tracker.end() is called, because there may still
    # be pending background work that needs a nailgun.
    if (hasattr(command.options, 'cleanup_nailguns') and command.options.cleanup_nailguns) \
        or config.get('nailgun', 'autokill', default=False):
      NailgunTask.killall(None)
def _find_targets(self):
  """Yields the context's root targets, or every target in the repo when none given."""
  if len(self.context.target_roots) > 0:
    for root_target in self.context.target_roots:
      yield root_target
    return
  # No roots specified: fall back to scanning every BUILD file in the repo.
  for buildfile in BuildFile.scan_buildfiles(get_buildroot()):
    for target_address in Target.get_all_addresses(buildfile):
      yield Target.get(target_address)
def create_parser(defaults=None):
  """Creates a config parser that supports %([key-name])s value substitution.

  Any defaults supplied will act as if specified in the loaded config file's DEFAULT
  section and be available for substitutions. The 'buildroot', invoking 'user' and
  invoking user's 'homedir' are automatically defaulted.
  """
  seeded = {
    'buildroot': get_buildroot(),
    'homedir': os.path.expanduser('~'),
    'user': getpass.getuser(),
    'pants_workdir': os.path.join(get_buildroot(), '.pants.d'),
    'pants_supportdir': os.path.join(get_buildroot(), 'build-support'),
    'pants_distdir': os.path.join(get_buildroot(), 'dist'),
  }
  if defaults:
    seeded.update(defaults)
  return ConfigParser.SafeConfigParser(seeded)
def _get_target(address):
  """Resolves an address spec to a Target.

  :raises TaskError: when the spec cannot be parsed or resolves to no target.
  """
  try:
    address = Address.parse(get_buildroot(), address, is_relative=False)
  except IOError as e:
    raise TaskError('Failed to parse address: %s: %s' % (address, e))
  target = Target.get(address)
  if not target:
    raise TaskError('Invalid target address: %s' % address)
  return target
def genlang(self, lang, targets):
  """Generates thrift code for the given lang ('java' or 'python') across targets.

  Spawns one thrift compiler process per thrift root in parallel, then collects the
  results and merges successful output into the combined dir.

  :raises TaskError: for an unrecognized lang or a failed thrift invocation.
  """
  bases, sources = calculate_compile_roots(targets, self.is_gentarget)

  if lang == 'java':
    gen = self.gen_java.gen
  elif lang == 'python':
    gen = self.gen_python.gen
  else:
    raise TaskError('Unrecognized thrift gen lang: %s' % lang)

  args = [
    self.thrift_binary,
    '--gen', gen,
    '-recurse',
  ]

  if self.strict:
    args.append('-strict')
  if self.verbose:
    args.append('-verbose')
  for base in bases:
    args.extend(('-I', base))

  sessions = []
  for source in sources:
    self.context.log.info('Generating thrift for %s\n' % source)
    # Create a unique session dir for this thrift root. Sources may be full paths but we only
    # need the path relative to the build root to ensure uniqueness.
    # TODO(John Sirois): file paths should be normalized early on and uniformly, fix the need to
    # relpath here at all.
    relsource = os.path.relpath(source, get_buildroot())
    outdir = os.path.join(self.session_dir, '.'.join(relsource.split(os.path.sep)))
    safe_mkdir(outdir)

    cmd = args[:]
    cmd.extend(('-o', outdir))
    cmd.append(source)
    log.debug('Executing: %s' % ' '.join(cmd))
    sessions.append(self.ThriftSession(outdir, cmd, subprocess.Popen(cmd)))

  result = 0
  for session in sessions:
    if result != 0:
      # An earlier session already failed; abort the remaining ones.
      session.process.kill()
    else:
      result = session.process.wait()
      if result != 0:
        self.context.log.error('Failed: %s' % ' '.join(session.cmd))
      else:
        _copytree(session.outdir, self.combined_dir)
  if result != 0:
    raise TaskError('%s ... exited non-zero (%i)' % (self.thrift_binary, result))
def _get_resource_extensions(self, project):
  """Return the set of resource extensions for the project plus any found under tests/resources."""
  extensions = set(project.resource_extensions)
  # TODO(John Sirois): make test resources 1st class in ant build and punch this through to pants
  # model
  test_resources_root = os.path.join(get_buildroot(), 'tests', 'resources')
  for _dirpath, _dirnames, filenames in os.walk(test_resources_root):
    extensions |= set(Project.extract_resource_extensions(filenames))
  return extensions
def outdir(self):
  """Return the generated-code output dir, relative to the current working directory.

  Precedence: the scrooge_gen_create_outdir option, then this task's config 'workdir',
  then <pants_workdir>/<name>.
  """
  bootstrap_workdir = os.path.join(get_buildroot(), '.pants.d')
  default_workdir = os.path.join(
    self.context.config.getdefault('pants_workdir', default=bootstrap_workdir),
    self.name)
  chosen = self.context.options.scrooge_gen_create_outdir
  if not chosen:
    chosen = self.context.config.get(self.config_section, 'workdir', default=default_workdir)
  return os.path.relpath(chosen)
def classnames_from_source_file(self, srcfile):
  """Yield the class names produced from srcfile, warning when it generated none."""
  if os.path.isabs(srcfile):
    relsrc = os.path.relpath(srcfile, get_buildroot())
  else:
    relsrc = srcfile
  products = self.context.products.get_data('classes_by_source').get(relsrc)
  if not products:
    # It's valid - if questionable - to have a source file with no classes when, for
    # example, the source file has all its code commented out.
    self.context.log.warn('Source file %s generated no classes' % srcfile)
    return
  for _basedir, classfiles in products.rel_paths():
    for classfile in classfiles:
      yield JUnitRun.classfile_to_classname(classfile)
def add_resource_paths(predicate):
  """Append each matching target's sibling 'resources' dir to the classpath, once per base."""
  seen_bases = set()
  for target in self.context.targets():
    if not predicate(target):
      continue
    base = target.target_base
    if base in seen_bases:
      continue
    seen_bases.add(base)
    sibling_resources_base = os.path.join(os.path.dirname(base), 'resources')
    classpath.append(os.path.join(get_buildroot(), sibling_resources_base))
def parse_url(spec):
  """Resolve a pants wiki-link spec to (alias, url); non-link specs map to (spec, spec)."""
  link = MarkdownToHtml.PANTS_LINK.match(spec)
  if not link:
    return spec, spec
  page = Target.get(Address.parse(get_buildroot(), link.group(1)))
  anchor = link.group(2) or ''
  if not page:
    raise TaskError('Invalid link %s' % link.group(1))
  alias, url = url_builder(page, config=get_config(page))
  return alias, url + anchor
def load(configpath=None, defaults=None):
  """Loads a Config from the given path, by default <buildroot>/pants.ini.

  Any defaults supplied will act as if specified in the loaded config file's DEFAULT
  section.  The 'buildroot', invoking 'user' and invoking user's 'homedir' are
  automatically defaulted.
  """
  path = configpath if configpath else os.path.join(get_buildroot(), 'pants.ini')
  parser = Config.create_parser(defaults=defaults)
  with open(path) as ini:
    parser.readfp(ini)
  return Config(parser)
def check(self, srcs, actual_deps):
  """Check for missing deps.

  See docstring for _compute_missing_deps for details.

  Depending on configuration, missing deps are reported as errors (optionally fatal),
  missing direct deps as warnings (optionally fatal), and unnecessary deps are not yet
  implemented.
  """
  if self._check_missing_deps or self._check_missing_direct_deps or self._check_unnecessary_deps:
    missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps = \
      self._compute_missing_deps(srcs, actual_deps)

    buildroot = get_buildroot()

    def shorten(path):  # Make the output easier to read.
      # Strip the buildroot or ivy home prefix, whichever matches first.
      for prefix in [buildroot, self._context.ivy_home]:
        if path.startswith(prefix):
          return os.path.relpath(path, prefix)
      return path

    if self._check_missing_deps and (missing_file_deps or missing_tgt_deps):
      for (tgt_pair, evidence) in missing_tgt_deps:
        evidence_str = '\n'.join([
          '    %s uses %s' % (shorten(e[0]), shorten(e[1]))
          for e in evidence
        ])
        self._context.log.error(
          'Missing BUILD dependency %s -> %s because:\n%s' %
          (tgt_pair[0].address.reference(), tgt_pair[1].address.reference(), evidence_str))
      for (src_tgt, dep) in missing_file_deps:
        self._context.log.error(
          'Missing BUILD dependency %s -> %s' %
          (src_tgt.address.reference(), shorten(dep)))
      if self._check_missing_deps == 'fatal':
        raise TaskError('Missing deps.')

    if self._check_missing_direct_deps:
      for (tgt_pair, evidence) in missing_direct_tgt_deps:
        evidence_str = '\n'.join([
          '    %s uses %s' % (shorten(e[0]), shorten(e[1]))
          for e in evidence
        ])
        # Direct-dep violations are warnings, not errors, unless configured fatal.
        self._context.log.warn(
          'Missing direct BUILD dependency %s -> %s because:\n%s' %
          (tgt_pair[0].address, tgt_pair[1].address, evidence_str))
      if self._check_missing_direct_deps == 'fatal':
        raise TaskError('Missing direct deps.')

    if self._check_unnecessary_deps:
      raise TaskError('Unnecessary dep warnings not implemented yet.')
def create_binary(self, binary):
  """Create the jar for a jvm_binary target.

  Assembles, in order: the internal jars produced for the binary and its internal deps,
  external jar dependencies (when building a deploy jar), compiled classes and resources
  keyed by the binary target, and finally a manifest carrying the Main-Class entry.
  """
  import platform
  safe_mkdir(self.outdir)

  jarmap = self.context.products.get('jars')

  binary_jarname = '%s.jar' % binary.basename
  binaryjarpath = os.path.join(self.outdir, binary_jarname)
  self.context.log.info('creating %s' % os.path.relpath(binaryjarpath, get_buildroot()))

  with open_jar(binaryjarpath, 'w', compression=self.compression, allowZip64=self.zip64) as jar:
    def add_jars(target):
      # Dump the contents of every internal jar generated for this target into the binary jar.
      generated = jarmap.get(target)
      if generated:
        for basedir, jars in generated.items():
          for internaljar in jars:
            self.dump(os.path.join(basedir, internaljar), jar)

    binary.walk(add_jars, lambda t: t.is_internal)

    if self.deployjar:
      # A deploy jar also bundles all external (3rdparty) jar dependencies.
      for basedir, externaljar in self.list_jar_dependencies(binary):
        self.dump(os.path.join(basedir, externaljar), jar)

    def write_binary_data(product_type):
      # Write the given product's files into the jar at their product-relative paths.
      data = self.context.products.get_data(product_type).get(binary)
      if data:
        for root, rel_paths in data.rel_paths():
          for rel_path in rel_paths:
            jar.write(os.path.join(root, rel_path), arcname=rel_path)

    write_binary_data('classes_by_target')
    write_binary_data('resources_by_target')

    manifest = Manifest()
    manifest.addentry(Manifest.MANIFEST_VERSION, '1.0')
    manifest.addentry(
      Manifest.CREATED_BY,
      'python %s pants %s (Twitter, Inc.)' % (platform.python_version(), get_version()))
    # A jvm_binary need not declare a main; emit a sentinel message instead of failing.
    main = binary.main or '*** java -jar not supported, please use -cp and pick a main ***'
    manifest.addentry(Manifest.MAIN_CLASS, main)
    jar.writestr(Manifest.PATH, manifest.contents())

  jarmap.add(binary, self.outdir, [binary_jarname])