def execute(self, args=()):
  entry_point = self.entry()
  with mutable_sys():
    sys.path, sys.path_importer_cache = self.minimum_path()
    self._env.activate()
    if 'PEX_COVERAGE' in os.environ:
      PEX.start_coverage()
    TRACER.log('PYTHONPATH now %s' % ':'.join(sys.path))
    force_interpreter = 'PEX_INTERPRETER' in os.environ
    if entry_point and not force_interpreter:
      self.execute_entry(entry_point, args)
    else:
      os.unsetenv('PEX_INTERPRETER')
      TRACER.log('%s, dropping into interpreter' % (
          'PEX_INTERPRETER specified' if force_interpreter else 'No entry point specified.'))
      if sys.argv[1:]:
        try:
          with open(sys.argv[1]) as fp:
            ast = compile(fp.read(), fp.name, 'exec')
        except IOError as e:
          print("Could not open %s in the environment [%s]: %s" % (sys.argv[1], sys.argv[0], e))
          sys.exit(1)
        sys.argv = sys.argv[1:]
        old_name = globals()['__name__']
        try:
          globals()['__name__'] = '__main__'
          Compatibility.exec_function(ast, globals())
        finally:
          globals()['__name__'] = old_name
      else:
        import code
        code.interact()

def parse(self, **global_args):
  """The entry point to parsing of a BUILD file.

  Changes the working directory to the BUILD file directory and then evaluates the BUILD file
  with the ROOT_DIR and __file__ globals set in addition to any globals specified as kwargs.
  As target methods are parsed they can examine the stack to find these globals and thus locate
  themselves for the purposes of finding files (see locate() and bind()).
  """
  from twitter.pants.targets.sources import SourceRoot

  if self.buildfile not in ParseContext._parsed:
    buildfile_family = tuple(self.buildfile.family())

    pants_context = {}
    for str_to_exec in self._strs_to_exec:
      ast = compile(str_to_exec, '<string>', 'exec')
      Compatibility.exec_function(ast, pants_context)

    with ParseContext.activate(self):
      for buildfile in buildfile_family:
        # We may have traversed a sibling already, guard against re-parsing it.
        if buildfile not in ParseContext._parsed:
          ParseContext._parsed.add(buildfile)

          buildfile_dir = os.path.dirname(buildfile.full_path)
          eval_globals = copy.copy(pants_context)
          eval_globals.update({
            'ROOT_DIR': buildfile.root_dir,
            '__file__': buildfile.full_path,
            'globs': Fileset.lazy_rel_globs(buildfile_dir),
            'rglobs': Fileset.lazy_rel_rglobs(buildfile_dir),
            'source_root': SourceRoot.lazy_rel_source_root(buildfile_dir),
          })
          eval_globals.update(global_args)
          Compatibility.exec_function(buildfile.code(), eval_globals)

def parse(self, **globalargs):
  """The entrypoint to parsing of a BUILD file.

  Changes the working directory to the BUILD file directory and then evaluates the BUILD file
  with the ROOT_DIR and __file__ globals set in addition to any globals specified as kwargs.
  As target methods are parsed they can examine the stack to find these globals and thus locate
  themselves for the purposes of finding files (see locate() and bind()).
  """
  if self.buildfile not in ParseContext._parsed:
    buildfile_family = tuple(self.buildfile.family())
    ParseContext._parsed.update(buildfile_family)

    pants_context = {}
    for str_to_exec in self._strs_to_exec:
      ast = compile(str_to_exec, '<string>', 'exec')
      Compatibility.exec_function(ast, pants_context)

    with ParseContext.activate(self):
      start = os.path.abspath(os.curdir)
      try:
        os.chdir(self.buildfile.parent_path)
        for buildfile in buildfile_family:
          self.buildfile = buildfile
          eval_globals = copy.copy(pants_context)
          eval_globals.update({
            'ROOT_DIR': buildfile.root_dir,
            '__file__': buildfile.full_path,
            # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
            'PANTS_NEW': ParseContext.PANTS_NEW
          })
          eval_globals.update(globalargs)
          Compatibility.exec_function(buildfile.code(), eval_globals)
      finally:
        os.chdir(start)

def execute(self, args=()):
  entry_point = self.entry()
  with mutable_sys():
    sys.path, sys.path_importer_cache = self.minimum_path()
    self._env.activate()
    if 'PEX_COVERAGE' in os.environ:
      PEX.start_coverage()
    self.debug('PYTHONPATH now %s' % ':'.join(sys.path))
    force_interpreter = 'PEX_INTERPRETER' in os.environ
    if entry_point and not force_interpreter:
      self.execute_entry(entry_point, args)
    else:
      self.debug('%s, dropping into interpreter' % (
          'PEX_INTERPRETER specified' if force_interpreter else 'No entry point specified.'))
      if sys.argv[1:]:
        try:
          with open(sys.argv[1]) as fp:
            ast = compile(fp.read(), fp.name, 'exec')
        except IOError as e:
          print("Could not open %s in the environment [%s]: %s" % (sys.argv[1], sys.argv[0], e))
          sys.exit(1)
        sys.argv = sys.argv[1:]
        old_name = globals()['__name__']
        try:
          globals()['__name__'] = '__main__'
          Compatibility.exec_function(ast, globals())
        finally:
          globals()['__name__'] = old_name
      else:
        import code
        code.interact()

def parse(self, **globals):
  """The entrypoint to parsing of a BUILD file.

  Changes the working directory to the BUILD file directory and then evaluates the BUILD file
  with the ROOT_DIR and __file__ globals set in addition to any globals specified as kwargs.
  As target methods are parsed they can examine the stack to find these globals and thus locate
  themselves for the purposes of finding files (see locate() and bind()).
  """
  if self.buildfile not in ParseContext._parsed:
    buildfile_family = tuple(self.buildfile.family())
    ParseContext._parsed.update(buildfile_family)

    pants_context = {}
    ast = compile("from twitter.pants import *", "<string>", "exec")
    Compatibility.exec_function(ast, pants_context)

    def _parse():
      start = os.path.abspath(os.curdir)
      try:
        os.chdir(self.buildfile.parent_path)
        for buildfile in buildfile_family:
          self.buildfile = buildfile
          eval_globals = copy.copy(pants_context)
          eval_globals.update({
            'ROOT_DIR': buildfile.root_dir,
            '__file__': buildfile.full_path,
            # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
            'PANTS_NEW': False
          })
          eval_globals.update(globals)
          Compatibility.exec_function(buildfile.code(), eval_globals)
      finally:
        os.chdir(start)

    self.do_in_context(_parse)

def load_hooks_file(cls, path):
  """Load a file containing hooks.

  If there are any errors compiling or executing the file, the errors will be logged, the hooks
  from the file will be skipped, but the execution of the command will continue.
  """
  with open(path, "r") as hooks_file:
    hooks_data = hooks_file.read()

  hooks_code = None
  try:
    hooks_code = compile(hooks_data, path, "exec")
  except (SyntaxError, TypeError) as e:
    logging.warn("Error compiling hooks file %s: %s" % (path, e))
    print("Error compiling hooks file %s: %s" % (path, e), file=sys.stderr)
    return {}

  hooks_environment = {}
  try:
    Compatibility.exec_function(hooks_code, hooks_environment)
  except Exception as e:
    # Unfortunately, exec could throw *anything* at all.
    logging.warn("Warning: error loading hooks file %s: %s" % (path, e))
    print("Warning: error loading hooks file %s: %s" % (path, e), file=sys.stderr)
    return {}

  for hook in hooks_environment.get("hooks", []):
    cls.register_command_hook(hook)
  return hooks_environment

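# A minimal sketch of a hooks file that load_hooks_file() above could consume. The hook body
# and its signature are illustrative assumptions, not taken from the source: the file only has
# to leave a `hooks` iterable in its namespace, and each entry is handed to
# cls.register_command_hook().
def announce_command(*args, **kwargs):
  # Purely illustrative hook body; the real hook interface is defined by the registering command.
  print("command hook invoked with %r %r" % (args, kwargs))

hooks = [announce_command]
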
def parse(self, **globalargs):
  """The entry point to parsing of a BUILD file.

  See locate().
  """
  from pants.targets.sources import SourceRoot

  if self.buildfile not in ParseContext._parsed:
    buildfile_family = tuple(self.buildfile.family())

    pants_context = self.default_globals(Config.load())

    with ParseContext.activate(self):
      for buildfile in buildfile_family:
        self._active_buildfile = buildfile
        # We may have traversed a sibling already, guard against re-parsing it.
        if buildfile not in ParseContext._parsed:
          ParseContext._parsed.add(buildfile)

          buildfile_dir = os.path.dirname(buildfile.full_path)

          # TODO(John Sirois): XXX imports are done here to prevent cycles
          from pants.targets.jvm_binary import Bundle
          from pants.targets.sources import SourceRoot

          class RelativeBundle(Bundle):
            def __init__(self, mapper=None, relative_to=None):
              super(RelativeBundle, self).__init__(
                  base=buildfile_dir,
                  mapper=mapper,
                  relative_to=relative_to)

          # TODO(John Sirois): This is not build-dictionary friendly - rework SourceRoot to
          # allow for doc of both register (as source_root) and source_root.here(*types).
          class RelativeSourceRoot(object):
            @staticmethod
            def here(*allowed_target_types):
              """Registers the cwd as a source root for the given target types."""
              SourceRoot.register(buildfile_dir, *allowed_target_types)

            def __init__(self, basedir, *allowed_target_types):
              SourceRoot.register(os.path.join(buildfile_dir, basedir), *allowed_target_types)

          eval_globals = copy.copy(pants_context)
          eval_globals.update({
            'ROOT_DIR': buildfile.root_dir,
            '__file__': buildfile.full_path,
            'globs': partial(Fileset.globs, root=buildfile_dir),
            'rglobs': partial(Fileset.rglobs, root=buildfile_dir),
            'zglobs': partial(Fileset.zglobs, root=buildfile_dir),
            'source_root': RelativeSourceRoot,
            'bundle': RelativeBundle
          })
          eval_globals.update(globalargs)
          Compatibility.exec_function(buildfile.code(), eval_globals)

def test_code(self):
  with safe_open(self.fullpath('BUILD.code'), 'w') as fp:
    fp.write('lib = java_library(name="jake", age=42)')
  build_file = self.create_buildfile('BUILD.code')
  parsed_locals = Compatibility.exec_function(build_file.code(), {'java_library': dict})
  lib = parsed_locals.pop('lib', None)
  self.assertEqual(dict(name='jake', age=42), lib)

def _parse():
  start = os.path.abspath(os.curdir)
  try:
    os.chdir(self.buildfile.parent_path)
    for buildfile in buildfile_family:
      self.buildfile = buildfile
      eval_globals = copy.copy(pants_context)
      eval_globals.update({
        'ROOT_DIR': buildfile.root_dir,
        '__file__': buildfile.full_path,
        # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
        'PANTS_NEW': False
      })
      eval_globals.update(globals)
      Compatibility.exec_function(buildfile.code(), eval_globals)
  finally:
    os.chdir(start)

def parse_build_file(self, build_file):
  """Capture Addressable instances from parsing `build_file`.

  Prepare a context for parsing, read a BUILD file from the filesystem, and return the
  Addressable instances generated by executing the code.
  """
  logger.debug("Parsing BUILD file {build_file}."
               .format(build_file=build_file))

  try:
    build_file_code = build_file.code()
  except Exception:
    logger.exception("Error parsing {build_file}.".format(build_file=build_file))
    traceback.print_exc()
    raise

  parse_state = self._build_configuration.initialize_parse_state(build_file)
  try:
    Compatibility.exec_function(build_file_code, parse_state.parse_globals)
  except Exception:
    logger.exception("Error parsing {build_file}.".format(build_file=build_file))
    traceback.print_exc()
    raise

  address_map = {}
  for address, addressable in parse_state.registered_addressable_instances:
    logger.debug('Adding {addressable} to the BuildFileParser address map with {address}'
                 .format(addressable=addressable, address=address))
    if address in address_map:
      conflicting_addressable = address_map[address]
      raise BuildFileParser.TargetConflictException(
        "File {conflicting_file} defines address '{target_name}' more than once."
        .format(conflicting_file=address.build_file,
                target_name=address.target_name))
    address_map[address] = addressable

  logger.debug("{build_file} produced the following Addressables:"
               .format(build_file=build_file))
  for address, addressable in address_map.items():
    logger.debug(" * {address}: {addressable}"
                 .format(address=address, addressable=addressable))

  return address_map

def load_module(self, fullmodname):
  """PEP-302-compliant load_module() method.

  Args:
    fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.

  Returns:
    The module object constructed from the source code.

  Raises:
    SyntaxError if the module's source code is syntactically incorrect.
    ImportError if there was a problem accessing the source code.
    Whatever else can be raised by executing the module's source code.
  """
  with self._log_nested('entering load_module(%s)' % fullmodname, at_level=3):
    submodname, is_package, fullpath, code = self._get_code(fullmodname)
    mod = sys.modules.get(fullmodname)
    try:
      if mod is None:
        mod = sys.modules[fullmodname] = types.ModuleType(fullmodname)
      mod.__loader__ = self
      mod.__file__ = fullpath
      mod.__name__ = fullmodname
      self._log('** __file__ = %s' % mod.__file__, at_level=4)
      self._log('** __name__ = %s' % mod.__name__, at_level=4)
      if is_package:
        mod.__path__ = [os.path.dirname(mod.__file__)]
        self._log('** __path__ = %s' % mod.__path__, at_level=4)
      Compatibility.exec_function(code, mod.__dict__)
    except Exception as e:
      self._log('Caught exception: %s' % e)
      if fullmodname in sys.modules:
        del sys.modules[fullmodname]
      raise
    self._log('exiting load_module(%s) => __file__ = %s, __name__ = %s' % (
        fullmodname, mod.__file__, mod.__name__), at_level=3)
    # We have to do this because of modules like _apipkg that rewrite sys.modules and
    # expect that to be what gets written into the global namespace.
    return sys.modules.get(fullmodname)

def default_globals(cls, config=None):
  """
  Has pants.*, but not file-specific things like __file__.

  If you want to add new imports to be available to all BUILD files, add a section to the config
  similar to:

  [parse]
  headers: ['from test import get_jar',]

  You may also need to add new roots to the sys.path. See _run in pants_exe.py.
  """
  to_exec = list(cls._strs_to_exec)
  if config:
    # TODO: This can be replaced once extensions are enabled with
    # https://github.com/pantsbuild/pants/issues/5
    to_exec.extend(config.getlist('parse', 'headers', default=[]))

  pants_context = {}
  for str_to_exec in to_exec:
    ast = compile(str_to_exec, '<string>', 'exec')
    Compatibility.exec_function(ast, pants_context)

  return pants_context

def default_globals(cls, config=None):
  """
  Has twitter.pants.*, but not file-specific things like __file__.

  If you want to add new imports to be available to all BUILD files, add a section to the config
  similar to:

  [parse]
  headers: ['from test import get_jar',]

  You may also need to add new roots to the sys.path. See _run in pants_exe.py.
  """
  to_exec = list(cls._strs_to_exec)
  if config:
    # TODO: This can be replaced once extensions are enabled with
    # https://github.com/pantsbuild/pants/issues/5
    to_exec.extend(config.getlist('parse', 'headers', default=[]))

  pants_context = {}
  for str_to_exec in to_exec:
    ast = compile(str_to_exec, '<string>', 'exec')
    Compatibility.exec_function(ast, pants_context)

  return pants_context

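# Illustrative only (FakeConfig and the header string are assumptions, not from the source):
# this mirrors how the default_globals() variants above fold `[parse] headers` entries from the
# config into the namespace handed to every BUILD file.
from twitter.common.lang import Compatibility

class FakeConfig(object):
  def getlist(self, section, option, default=None):
    if (section, option) == ('parse', 'headers'):
      return ['from os.path import join as join_path']
    return default if default is not None else []

to_exec = ['import os']  # stands in for cls._strs_to_exec
to_exec.extend(FakeConfig().getlist('parse', 'headers', default=[]))

pants_context = {}
for str_to_exec in to_exec:
  ast = compile(str_to_exec, '<string>', 'exec')
  Compatibility.exec_function(ast, pants_context)

assert 'os' in pants_context and 'join_path' in pants_context
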
def parse(self, **global_args):
  """The entry point to parsing of a BUILD file.

  Changes the working directory to the BUILD file directory and then evaluates the BUILD file
  with the ROOT_DIR and __file__ globals set in addition to any globals specified as kwargs.
  As target methods are parsed they can examine the stack to find these globals and thus locate
  themselves for the purposes of finding files (see locate() and bind()).
  """
  if self.buildfile not in ParseContext._parsed:
    buildfile_family = tuple(self.buildfile.family())

    pants_context = {}
    for str_to_exec in self._strs_to_exec:
      ast = compile(str_to_exec, '<string>', 'exec')
      Compatibility.exec_function(ast, pants_context)

    with ParseContext.activate(self):
      start = os.path.abspath(os.curdir)
      try:
        os.chdir(self.buildfile.parent_path)
        for buildfile in buildfile_family:
          # We may have traversed a sibling already, guard against re-parsing it.
          if buildfile not in ParseContext._parsed:
            ParseContext._parsed.add(buildfile)

            eval_globals = copy.copy(pants_context)
            eval_globals.update({
              'ROOT_DIR': buildfile.root_dir,
              '__file__': buildfile.full_path,
            })
            eval_globals.update(global_args)
            Compatibility.exec_function(buildfile.code(), eval_globals)
      finally:
        os.chdir(start)

def parse_build_file(self, build_file):
  """Capture Addressable instances from parsing `build_file`.

  Prepare a context for parsing, read a BUILD file from the filesystem, and return the
  Addressable instances generated by executing the code.
  """

  def _format_context_msg(lineno, offset, error_type, message):
    """Show the line of the BUILD file that has the error along with a few lines of context."""
    with open(build_file.full_path, "r") as build_contents:
      context = "Error parsing {path}:\n".format(path=build_file.full_path)
      curr_lineno = 0
      for line in build_contents.readlines():
        curr_lineno += 1
        if curr_lineno == lineno:
          highlight = '*'
        else:
          highlight = ' '
        if curr_lineno >= lineno - 3:
          context += "{highlight}{curr_lineno:4d}: {line}".format(
            highlight=highlight, line=line, curr_lineno=curr_lineno)
          if offset and lineno == curr_lineno:
            context += " {caret:>{width}} {error_type}: {message}\n\n" \
              .format(caret="^", width=int(offset), error_type=error_type, message=message)
        if curr_lineno > lineno + 3:
          break
      return context

  logger.debug("Parsing BUILD file {build_file}."
               .format(build_file=build_file))

  try:
    build_file_code = build_file.code()
  except SyntaxError as e:
    raise self.ParseError(_format_context_msg(e.lineno, e.offset, e.__class__.__name__, e))
  except Exception as e:
    raise self.ParseError("{error_type}: {message}\n while parsing BUILD file {build_file}"
                          .format(error_type=e.__class__.__name__,
                                  message=e, build_file=build_file))

  parse_state = self._build_configuration.initialize_parse_state(build_file)
  try:
    Compatibility.exec_function(build_file_code, parse_state.parse_globals)
  except Exception as e:
    raise self.ExecuteError("{message}\n while executing BUILD file {build_file}"
                            .format(message=e, build_file=build_file))

  address_map = {}
  for address, addressable in parse_state.registered_addressable_instances:
    logger.debug('Adding {addressable} to the BuildFileParser address map with {address}'
                 .format(addressable=addressable, address=address))
    if address in address_map:
      raise self.AddressableConflictException(
        "File {conflicting_file} defines address '{target_name}' more than once."
        .format(conflicting_file=address.build_file,
                target_name=address.target_name))
    address_map[address] = addressable

  logger.debug("{build_file} produced the following Addressables:"
               .format(build_file=build_file))
  for address, addressable in address_map.items():
    logger.debug(" * {address}: {addressable}"
                 .format(address=address, addressable=addressable))

  return address_map

def parse_build_file(self, build_file):
  """Capture TargetProxies from parsing `build_file`.

  Prepare a context for parsing, read a BUILD file from the filesystem, and record the
  TargetProxies generated by executing the code.
  """
  if build_file in self._added_build_files:
    logger.debug('BuildFile {build_file} has already been parsed.'
                 .format(build_file=build_file))
    return

  logger.debug("Parsing BUILD file {build_file}."
               .format(build_file=build_file))

  parse_context = {}
  # TODO(pl): Don't inject __file__ into the context.  BUILD files should not be aware
  # of their location on the filesystem.
  parse_context['__file__'] = build_file.full_path

  parse_context.update(self._exposed_objects)
  parse_context.update(
    (key, partial(util, rel_path=build_file.spec_path))
    for key, util in self._partial_path_relative_utils.items()
  )
  parse_context.update(
    (key, util(rel_path=build_file.spec_path))
    for key, util in self._applicative_path_relative_utils.items()
  )
  registered_target_proxies = set()
  parse_context.update(
    (alias, TargetCallProxy(target_type=target_type,
                            build_file=build_file,
                            registered_target_proxies=registered_target_proxies))
    for alias, target_type in self._target_alias_map.items()
  )

  try:
    build_file_code = build_file.code()
  except:
    logger.exception("Error parsing {build_file}."
                     .format(build_file=build_file))
    traceback.print_exc()
    raise

  try:
    Compatibility.exec_function(build_file_code, parse_context)
  except:
    logger.exception("Error running {build_file}."
                     .format(build_file=build_file))
    traceback.print_exc()
    raise

  for target_proxy in registered_target_proxies:
    logger.debug('Adding {target_proxy} to the proxy build graph with {address}'
                 .format(target_proxy=target_proxy, address=target_proxy.address))

    assert target_proxy.address not in self._target_proxy_by_address, (
      '{address} already in BuildGraph._target_proxy_by_address even though this BUILD file has'
      ' not yet been added to the BuildGraph.  The target type is: {target_type}'
      .format(address=target_proxy.address, target_type=target_proxy.target_type))

    assert target_proxy.address not in self.addresses_by_build_file[build_file], (
      '{address} has already been associated with {build_file} in the build graph.'
      .format(address=target_proxy.address, build_file=build_file))

    self._target_proxy_by_address[target_proxy.address] = target_proxy
    self.addresses_by_build_file[build_file].add(target_proxy.address)
    self._target_proxies_by_build_file[build_file].add(target_proxy)

  self._added_build_files.add(build_file)

  logger.debug("{build_file} produced the following TargetProxies:"
               .format(build_file=build_file))
  for target_proxy in registered_target_proxies:
    logger.debug(" * {target_proxy}".format(target_proxy=target_proxy))

def parse_build_file(self, build_file):
  """Capture TargetProxies from parsing `build_file`.

  Prepare a context for parsing, read a BUILD file from the filesystem, and record the
  TargetProxies generated by executing the code.
  """
  if build_file in self._added_build_files:
    logger.debug('BuildFile {build_file} has already been parsed.'
                 .format(build_file=build_file))
    return

  logger.debug("Parsing BUILD file {build_file}."
               .format(build_file=build_file))

  parse_context = {}
  # TODO(pl): Don't inject __file__ into the context.  BUILD files should not be aware
  # of their location on the filesystem.
  parse_context['__file__'] = build_file.full_path

  parse_context.update(self._exposed_objects)
  parse_context.update(
    (key, partial(util, rel_path=build_file.spec_path))
    for key, util in self._partial_path_relative_utils.items()
  )
  parse_context.update(
    (key, util(rel_path=build_file.spec_path))
    for key, util in self._applicative_path_relative_utils.items()
  )
  registered_target_proxies = set()
  parse_context.update(
    (alias, TargetCallProxy(target_type=target_type,
                            build_file=build_file,
                            registered_target_proxies=registered_target_proxies))
    for alias, target_type in self._target_alias_map.items()
  )
  for key, func in self._target_creation_utils.items():
    parse_context.update({key: partial(func, alias_map=parse_context)})

  try:
    build_file_code = build_file.code()
  except Exception:
    logger.exception("Error parsing {build_file}."
                     .format(build_file=build_file))
    traceback.print_exc()
    raise

  try:
    Compatibility.exec_function(build_file_code, parse_context)
  except Exception:
    logger.exception("Error running {build_file}."
                     .format(build_file=build_file))
    traceback.print_exc()
    raise

  for target_proxy in registered_target_proxies:
    logger.debug('Adding {target_proxy} to the proxy build graph with {address}'
                 .format(target_proxy=target_proxy, address=target_proxy.address))

    if target_proxy.address in self._target_proxy_by_address:
      conflicting_target = self._target_proxy_by_address[target_proxy.address]
      if conflicting_target.address.build_file != target_proxy.address.build_file:
        raise BuildFileParser.SiblingConflictException(
          "Both {conflicting_file} and {target_file} define the same target '{target_name}'"
          .format(conflicting_file=conflicting_target.address.build_file,
                  target_file=target_proxy.address.build_file,
                  target_name=conflicting_target.address.target_name))
      raise BuildFileParser.TargetConflictException(
        "File {conflicting_file} defines target '{target_name}' more than once."
        .format(conflicting_file=conflicting_target.address.build_file,
                target_name=conflicting_target.address.target_name))

    assert target_proxy.address not in self.addresses_by_build_file[build_file], (
      '{address} has already been associated with {build_file} in the build graph.'
      .format(address=target_proxy.address, build_file=build_file))

    self._target_proxy_by_address[target_proxy.address] = target_proxy
    self.addresses_by_build_file[build_file].add(target_proxy.address)
    self._target_proxies_by_build_file[build_file].add(target_proxy)

  self._added_build_files.add(build_file)

  logger.debug("{build_file} produced the following TargetProxies:"
               .format(build_file=build_file))
  for target_proxy in registered_target_proxies:
    logger.debug(" * {target_proxy}".format(target_proxy=target_proxy))

#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import code

from twitter.common.lang import Compatibility

from apache.aurora.config.loader import AuroraConfigLoader


code.interact('Mesos Config REPL',
              local=Compatibility.exec_function(AuroraConfigLoader.DEFAULT_SCHEMA, globals()))

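# A minimal sketch of the REPL pattern above, with a made-up schema (my_schema is an assumption,
# not part of apache.aurora): Compatibility.exec_function executes the code object against the
# supplied globals dict and returns that same mapping, so its result can seed the local
# namespace of an interactive session.
import code

from twitter.common.lang import Compatibility

my_schema = compile("greeting = 'hello from the schema'", '<string>', 'exec')
namespace = Compatibility.exec_function(my_schema, globals())
code.interact('demo REPL', local=namespace)  # `greeting` is defined inside the REPL
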
def parse(self, **globalargs):
  """The entry point to parsing of a BUILD file.

  See locate().
  """
  from twitter.pants.targets.sources import SourceRoot

  if self.buildfile not in ParseContext._parsed:
    buildfile_family = tuple(self.buildfile.family())

    pants_context = self.default_globals(Config.load())

    with ParseContext.activate(self):
      for buildfile in buildfile_family:
        self._active_buildfile = buildfile
        # We may have traversed a sibling already, guard against re-parsing it.
        if buildfile not in ParseContext._parsed:
          ParseContext._parsed.add(buildfile)

          buildfile_dir = os.path.dirname(buildfile.full_path)

          # TODO(John Sirois): XXX imports are done here to prevent cycles
          from twitter.pants.targets.jvm_binary import Bundle
          from twitter.pants.targets.sources import SourceRoot

          class RelativeBundle(Bundle):
            def __init__(self, mapper=None, relative_to=None):
              super(RelativeBundle, self).__init__(
                  base=buildfile_dir,
                  mapper=mapper,
                  relative_to=relative_to)

          # TODO(John Sirois): This is not build-dictionary friendly - rework SourceRoot to
          # allow for doc of both register (as source_root) and source_root.here(*types).
          class RelativeSourceRoot(object):
            @staticmethod
            def here(*allowed_target_types):
              """Registers the cwd as a source root for the given target types."""
              SourceRoot.register(buildfile_dir, *allowed_target_types)

            def __init__(self, basedir, *allowed_target_types):
              SourceRoot.register(os.path.join(buildfile_dir, basedir), *allowed_target_types)

          eval_globals = copy.copy(pants_context)
          eval_globals.update({
            'ROOT_DIR': buildfile.root_dir,
            '__file__': buildfile.full_path,
            'globs': partial(Fileset.globs, root=buildfile_dir),
            'rglobs': partial(Fileset.rglobs, root=buildfile_dir),
            'zglobs': partial(Fileset.zglobs, root=buildfile_dir),
            'source_root': RelativeSourceRoot,
            'bundle': RelativeBundle
          })
          eval_globals.update(globalargs)
          Compatibility.exec_function(buildfile.code(), eval_globals)

def parse_build_file(self, build_file):
  """Capture TargetProxies from parsing `build_file`.

  Prepare a context for parsing, read a BUILD file from the filesystem, and record the
  TargetProxies generated by executing the code.
  """
  if build_file in self._added_build_files:
    logger.debug('BuildFile {build_file} has already been parsed.'
                 .format(build_file=build_file))
    return

  logger.debug("Parsing BUILD file {build_file}.".format(build_file=build_file))

  try:
    build_file_code = build_file.code()
  except Exception:
    logger.exception("Error parsing {build_file}.".format(build_file=build_file))
    traceback.print_exc()
    raise

  parse_state = self._build_configuration.initialize_parse_state(build_file)
  try:
    Compatibility.exec_function(build_file_code, parse_state.parse_globals)
  except Exception:
    logger.exception("Error parsing {build_file}.".format(build_file=build_file))
    traceback.print_exc()
    raise

  for target_proxy in parse_state.registered_target_proxies:
    logger.debug('Adding {target_proxy} to the proxy build graph with {address}'
                 .format(target_proxy=target_proxy, address=target_proxy.address))

    if target_proxy.address in self._target_proxy_by_address:
      conflicting_target = self._target_proxy_by_address[target_proxy.address]
      if conflicting_target.address.build_file != target_proxy.address.build_file:
        raise BuildFileParser.SiblingConflictException(
          "Both {conflicting_file} and {target_file} define the same target '{target_name}'"
          .format(conflicting_file=conflicting_target.address.build_file,
                  target_file=target_proxy.address.build_file,
                  target_name=conflicting_target.address.target_name))
      raise BuildFileParser.TargetConflictException(
        "File {conflicting_file} defines target '{target_name}' more than once."
        .format(conflicting_file=conflicting_target.address.build_file,
                target_name=conflicting_target.address.target_name))

    assert target_proxy.address not in self.addresses_by_build_file[build_file], (
      '{address} has already been associated with {build_file} in the build graph.'
      .format(address=target_proxy.address, build_file=build_file))

    self._target_proxy_by_address[target_proxy.address] = target_proxy
    self.addresses_by_build_file[build_file].add(target_proxy.address)
    self._target_proxies_by_build_file[build_file].add(target_proxy)

  self._added_build_files.add(build_file)

  logger.debug("{build_file} produced the following TargetProxies:"
               .format(build_file=build_file))
  for target_proxy in parse_state.registered_target_proxies:
    logger.debug(" * {target_proxy}".format(target_proxy=target_proxy))

  return parse_state.registered_target_proxies
