Ejemplo n.º 1
0
  def parse(self, **global_args):
    """The entry point to parsing of a BUILD file.

    Evaluates the BUILD file and its same-directory siblings (its "family") with ROOT_DIR,
    __file__, globs/rglobs and source_root bound in addition to any globals supplied as kwargs.
    As target methods are parsed they can examine the stack to find these globals and thus
    locate themselves for the purposes of finding files (see locate() and bind()).

    NOTE(review): unlike older variants of this method, this one does not chdir into the BUILD
    file directory; the path helpers are bound relative to buildfile_dir instead.
    """

    # Imported here rather than at module scope, presumably to avoid an import
    # cycle -- TODO confirm.
    from twitter.pants.targets.sources import SourceRoot

    if self.buildfile not in ParseContext._parsed:
      buildfile_family = tuple(self.buildfile.family())

      # Seed the shared evaluation context by exec'ing the configured header
      # snippets (contents of self._strs_to_exec).
      pants_context = {}
      for str_to_exec in self._strs_to_exec:
        ast = compile(str_to_exec, '<string>', 'exec')
        Compatibility.exec_function(ast, pants_context)

      with ParseContext.activate(self):
        for buildfile in buildfile_family:
          # We may have traversed a sibling already, guard against re-parsing it.
          if buildfile not in ParseContext._parsed:
            ParseContext._parsed.add(buildfile)

            buildfile_dir = os.path.dirname(buildfile.full_path)

            # Each sibling gets its own copy of the shared context so bindings
            # from one BUILD file cannot leak into another.
            eval_globals = copy.copy(pants_context)
            eval_globals.update({
              'ROOT_DIR': buildfile.root_dir,
              '__file__': buildfile.full_path,
              'globs': Fileset.lazy_rel_globs(buildfile_dir),
              'rglobs': Fileset.lazy_rel_rglobs(buildfile_dir),
              'source_root': SourceRoot.lazy_rel_source_root(buildfile_dir),
            })
            # Caller-supplied globals take precedence over the defaults above.
            eval_globals.update(global_args)
            Compatibility.exec_function(buildfile.code(), eval_globals)
Ejemplo n.º 2
0
 def execute(self, args=()):
   """Run this PEX: invoke the configured entry point if one exists, otherwise
   drop into an interpreter (running sys.argv[1] as a script when present, else
   an interactive session).

   :param args: positional args forwarded to the entry point.
   """
   entry_point = self.entry()
   with mutable_sys():
     # Restrict sys.path to the minimum needed before activating the environment.
     sys.path, sys.path_importer_cache = self.minimum_path()
     self._env.activate()
     if 'PEX_COVERAGE' in os.environ:
       PEX.start_coverage()
     TRACER.log('PYTHONPATH now %s' % ':'.join(sys.path))
     # PEX_INTERPRETER forces interpreter mode even when an entry point exists.
     force_interpreter = 'PEX_INTERPRETER' in os.environ
     if entry_point and not force_interpreter:
       self.execute_entry(entry_point, args)
     else:
       # NOTE(review): os.unsetenv does not update os.environ on all platforms;
       # children won't see PEX_INTERPRETER but this process's os.environ may
       # still contain it -- confirm intended.
       os.unsetenv('PEX_INTERPRETER')
       TRACER.log('%s, dropping into interpreter' % (
           'PEX_INTERPRETER specified' if force_interpreter else 'No entry point specified.'))
       if sys.argv[1:]:
         # Script mode: compile and execute the named file as __main__.
         try:
           with open(sys.argv[1]) as fp:
             ast = compile(fp.read(), fp.name, 'exec')
         except IOError as e:
           print("Could not open %s in the environment [%s]: %s" % (sys.argv[1], sys.argv[0], e))
           sys.exit(1)
         # Shift argv so the target script sees itself as sys.argv[0].
         sys.argv = sys.argv[1:]
         old_name = globals()['__name__']
         try:
           # Masquerade as __main__ so `if __name__ == '__main__'` guards fire.
           globals()['__name__'] = '__main__'
           Compatibility.exec_function(ast, globals())
         finally:
           globals()['__name__'] = old_name
       else:
         # Interactive mode.
         import code
         code.interact()
Ejemplo n.º 3
0
 def load_hooks_file(cls, path):
   """Load a file containing hooks. If there are any errors compiling or executing the file,
   the errors will be logged, the hooks from the file will be skipped, but the execution of
   the command will continue.

   :param path: filesystem path of the hooks file.
   :returns: the environment dict produced by executing the file, or {} on any error.
   """
   with open(path, "r") as hooks_file:
     hooks_data = hooks_file.read()
     hooks_code = None
     try:
       hooks_code = compile(hooks_data, path, "exec")
     except (SyntaxError, TypeError) as e:
       # Fixed: logging.warn is a deprecated alias of logging.warning.
       logging.warning("Error compiling hooks file %s: %s" % (path, e))
       print("Error compiling hooks file %s: %s" % (path, e), file=sys.stderr)
       return {}
     hooks_environment = {}
     try:
       Compatibility.exec_function(hooks_code, hooks_environment)
     except Exception as e:
       # Unfortunately, exec could throw *anything* at all.
       logging.warning("Warning: error loading hooks file %s: %s" % (path, e))
       print("Warning: error loading hooks file %s: %s" % (path, e), file=sys.stderr)
       return {}
     # Register every hook the file declared in its top-level `hooks` list.
     for hook in hooks_environment.get("hooks", []):
       cls.register_command_hook(hook)
     return hooks_environment
Ejemplo n.º 4
0
  def parse(self, **globals):
    """The entrypoint to parsing of a BUILD file.  Changes the working directory to the BUILD file
    directory and then evaluates the BUILD file with the ROOT_DIR and __file__ globals set in
    addition to any globals specified as kwargs.  As target methods are parsed they can examine the
    stack to find these globals and thus locate themselves for the purposes of finding files
    (see locate() and bind())."""

    # NOTE(review): the **globals kwarg shadows the builtin globals() in this method body.
    if self.buildfile not in ParseContext._parsed:
      buildfile_family = tuple(self.buildfile.family())
      # Mark the whole sibling family parsed up front so re-entrant parses are no-ops.
      ParseContext._parsed.update(buildfile_family)

      # Seed the shared evaluation context with the pants DSL.
      pants_context = {}
      ast = compile("from twitter.pants import *", "<string>", "exec")
      Compatibility.exec_function(ast, pants_context)

      def _parse():
        # Evaluate each sibling BUILD file from within its directory, restoring
        # the original cwd afterwards.
        start = os.path.abspath(os.curdir)
        try:
          os.chdir(self.buildfile.parent_path)
          for buildfile in buildfile_family:
            self.buildfile = buildfile
            # Fresh copy per file so bindings don't leak between siblings.
            eval_globals = copy.copy(pants_context)
            eval_globals.update({
              'ROOT_DIR': buildfile.root_dir,
              '__file__': buildfile.full_path,

              # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
              'PANTS_NEW': False
            })
            eval_globals.update(globals)
            Compatibility.exec_function(buildfile.code(), eval_globals)
        finally:
          os.chdir(start)

      self.do_in_context(_parse)
Ejemplo n.º 5
0
 def load_hooks_file(cls, path):
     """Load a file containing hooks. If there are any errors compiling or executing the file,
     the errors will be logged, the hooks from the file will be skipped, but the execution of
     the command will continue.

     :param path: filesystem path of the hooks file.
     :returns: the environment dict produced by executing the file, or {} on any error.
     """
     with open(path, "r") as hooks_file:
         hooks_data = hooks_file.read()
         hooks_code = None
         try:
             hooks_code = compile(hooks_data, path, "exec")
         except (SyntaxError, TypeError) as e:
             # Fixed: logging.warn is a deprecated alias of logging.warning.
             logging.warning("Error compiling hooks file %s: %s" % (path, e))
             print("Error compiling hooks file %s: %s" % (path, e),
                   file=sys.stderr)
             return {}
         hooks_environment = {}
         try:
             Compatibility.exec_function(hooks_code, hooks_environment)
         except Exception as e:
             # Unfortunately, exec could throw *anything* at all.
             logging.warning("Warning: error loading hooks file %s: %s" %
                             (path, e))
             print("Warning: error loading hooks file %s: %s" % (path, e),
                   file=sys.stderr)
             return {}
         # Register every hook the file declared in its top-level `hooks` list.
         for hook in hooks_environment.get("hooks", []):
             cls.register_command_hook(hook)
         return hooks_environment
Ejemplo n.º 6
0
 def execute(self, args=()):
   """Run this PEX: invoke the configured entry point if one exists, otherwise
   drop into an interpreter (running sys.argv[1] as a script when present, else
   an interactive session).

   :param args: positional args forwarded to the entry point.
   """
   entry_point = self.entry()
   with mutable_sys():
     # Restrict sys.path to the minimum needed before activating the environment.
     sys.path, sys.path_importer_cache = self.minimum_path()
     self._env.activate()
     if 'PEX_COVERAGE' in os.environ:
       PEX.start_coverage()
     self.debug('PYTHONPATH now %s' % ':'.join(sys.path))
     # PEX_INTERPRETER forces interpreter mode even when an entry point exists.
     force_interpreter = 'PEX_INTERPRETER' in os.environ
     if entry_point and not force_interpreter:
       self.execute_entry(entry_point, args)
     else:
       self.debug('%s, dropping into interpreter' % ('PEX_INTERPRETER specified' if force_interpreter
          else 'No entry point specified.'))
       if sys.argv[1:]:
         # Script mode: compile and execute the named file as __main__.
         try:
           with open(sys.argv[1]) as fp:
             ast = compile(fp.read(), fp.name, 'exec')
         except IOError as e:
           print("Could not open %s in the environment [%s]: %s" % (sys.argv[1], sys.argv[0], e))
           sys.exit(1)
         # Shift argv so the target script sees itself as sys.argv[0].
         sys.argv = sys.argv[1:]
         old_name = globals()['__name__']
         try:
           # Masquerade as __main__ so `if __name__ == '__main__'` guards fire.
           globals()['__name__'] = '__main__'
           Compatibility.exec_function(ast, globals())
         finally:
           globals()['__name__'] = old_name
       else:
         # Interactive mode.
         import code
         code.interact()
Ejemplo n.º 7
0
    def parse(self, **globalargs):
        """The entrypoint to parsing of a BUILD file.

        Changes the working directory to the BUILD file directory and then evaluates the BUILD
        file with the ROOT_DIR and __file__ globals set in addition to any globals specified as
        kwargs.  As target methods are parsed they can examine the stack to find these globals
        and thus locate themselves for the purposes of finding files (see locate() and bind()).
        """

        if self.buildfile not in ParseContext._parsed:
            buildfile_family = tuple(self.buildfile.family())
            # Mark the whole sibling family parsed up front so re-entrant parses are no-ops.
            ParseContext._parsed.update(buildfile_family)

            # Seed the shared evaluation context by exec'ing the configured header snippets.
            pants_context = {}
            for str_to_exec in self._strs_to_exec:
                ast = compile(str_to_exec, '<string>', 'exec')
                Compatibility.exec_function(ast, pants_context)

            with ParseContext.activate(self):
                # Evaluate each sibling from within the BUILD file's directory,
                # restoring the original cwd afterwards.
                start = os.path.abspath(os.curdir)
                try:
                    os.chdir(self.buildfile.parent_path)
                    for buildfile in buildfile_family:
                        self.buildfile = buildfile
                        # Fresh copy per file so bindings don't leak between siblings.
                        eval_globals = copy.copy(pants_context)
                        eval_globals.update({
                            'ROOT_DIR': buildfile.root_dir,
                            '__file__': buildfile.full_path,

                            # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
                            'PANTS_NEW': ParseContext.PANTS_NEW
                        })
                        eval_globals.update(globalargs)
                        Compatibility.exec_function(buildfile.code(),
                                                    eval_globals)
                finally:
                    os.chdir(start)
Ejemplo n.º 8
0
 def open(self, url, conn_timeout=None):
   """Fake urlopen for tests: fail on a zero deadline, record that an http url
   was opened, raise a canned HTTPError when configured, otherwise serve the
   canned response body and code."""
   if conn_timeout == 0:
     raise urllib_error.URLError('Could not reach %s within deadline.' % url)
   if url.startswith('http'):
     self.opened.set()
   if self.error:
     error_body = Compatibility.BytesIO(b'glhglhg')
     raise urllib_error.HTTPError(url, self.error, None, None, error_body)
   response_body = Compatibility.BytesIO(self.rv)
   return urllib_request.addinfourl(response_body, url, None, self.code)
Ejemplo n.º 9
0
  def parse(self, **globalargs):
    """The entry point to parsing of a BUILD file.

    Evaluates the BUILD file and its same-directory siblings (its "family") with ROOT_DIR,
    __file__, globs/rglobs/zglobs, source_root and bundle bound in addition to any globals
    supplied as kwargs.  See locate().
    """
    if self.buildfile not in ParseContext._parsed:
      buildfile_family = tuple(self.buildfile.family())

      pants_context = self.default_globals(Config.load())

      with ParseContext.activate(self):
        for buildfile in buildfile_family:
          self._active_buildfile = buildfile
          # We may have traversed a sibling already, guard against re-parsing it.
          if buildfile not in ParseContext._parsed:
            ParseContext._parsed.add(buildfile)

            buildfile_dir = os.path.dirname(buildfile.full_path)

            # TODO(John Sirois): XXX imports are done here to prevent a cycles
            from pants.targets.jvm_binary import Bundle
            from pants.targets.sources import SourceRoot

            # Bundle specialized to resolve paths relative to this BUILD file's directory.
            class RelativeBundle(Bundle):
              def __init__(self, mapper=None, relative_to=None):
                super(RelativeBundle, self).__init__(
                    base=buildfile_dir,
                    mapper=mapper,
                    relative_to=relative_to)

            # TODO(John Sirois): This is not build-dictionary friendly - rework SourceRoot to allow
            # allow for doc of both register (as source_root) and source_root.here(*types).
            class RelativeSourceRoot(object):
              @staticmethod
              def here(*allowed_target_types):
                """Registers the cwd as a source root for the given target types."""
                SourceRoot.register(buildfile_dir, *allowed_target_types)

              def __init__(self, basedir, *allowed_target_types):
                SourceRoot.register(os.path.join(buildfile_dir, basedir), *allowed_target_types)

            # Fresh copy per file so bindings don't leak between siblings.
            eval_globals = copy.copy(pants_context)
            eval_globals.update({
              'ROOT_DIR': buildfile.root_dir,
              '__file__': buildfile.full_path,
              'globs': partial(Fileset.globs, root=buildfile_dir),
              'rglobs': partial(Fileset.rglobs, root=buildfile_dir),
              'zglobs': partial(Fileset.zglobs, root=buildfile_dir),
              'source_root': RelativeSourceRoot,
              'bundle': RelativeBundle
            })
            # Caller-supplied globals take precedence over the defaults above.
            eval_globals.update(globalargs)
            Compatibility.exec_function(buildfile.code(), eval_globals)
Ejemplo n.º 10
0
def test_buffer_tail():
  """Buffer line splitting at EOF: a finite buffer treats EOF as end of line,
  while an infinite (tailed) buffer waits for an explicit newline."""
  writer = Compatibility.StringIO()
  buf = Buffer(writer, infinite=False)  # finite ==> eof is end of line
  assert buf.next() is None
  write_and_rewind(writer, '1234')
  assert buf.next() == '1234'

  writer = Compatibility.StringIO()
  buf = Buffer(writer, infinite=True)  # infinite ==> wait for newline, more data may come
  assert buf.next() is None
  write_and_rewind(writer, '1234')
  assert buf.next() is None
  write_and_rewind(writer, '\n')
  assert buf.next() == '1234'
Ejemplo n.º 11
0
  def _generate_coverage_config(self, source_mappings):
    """Build a coverage.py config from DEFAULT_COVERAGE_CONFIG, adding a [paths]
    section so `coverage combine` rewrites alternate source paths to canonical form.

    :param source_mappings: mapping of canonical path -> iterable of alternate paths.
    :returns: the populated SafeConfigParser.
    """
    # For the benefit of macos testing, add the 'real' path to the directory as an equivalent.
    # NOTE: this closure reads `canonical`, `alternates` and `realpaths` late-bound
    # from the loop below -- it must only be called inside that loop.
    def add_realpath(path):
      realpath = os.path.realpath(path)
      if realpath != canonical and realpath not in alternates:
        realpaths.add(realpath)

    # NOTE(review): SafeConfigParser and readfp() are deprecated aliases on Python 3.
    cp = configparser.SafeConfigParser()
    cp.readfp(Compatibility.StringIO(self.DEFAULT_COVERAGE_CONFIG))

    # We use the source_mappings to setup the `combine` coverage command to transform paths in
    # coverage data files into canonical form.
    # See the "[paths]" entry here: http://nedbatchelder.com/code/coverage/config.html for details.
    cp.add_section('paths')
    for canonical, alternates in source_mappings.items():
      key = canonical.replace(os.sep, '.')
      realpaths = set()
      add_realpath(canonical)
      for path in alternates:
        add_realpath(path)
      cp.set('paths', key, self._format_string_list([canonical] + list(alternates) + list(realpaths)))

    # See the debug options here: http://nedbatchelder.com/code/coverage/cmd.html#cmd-run-debug
    if self._debug:
      debug_options = self._format_string_list([
          # Dumps the coverage config realized values.
          'config',
          # Logs which files are skipped or traced and why.
          'trace'])
      cp.set('run', 'debug', debug_options)

    return cp
Ejemplo n.º 12
0
def test_stream():
  """GlogLine stream assembly: trailing non-GlogLines fold into the prior line,
  and a tailed (infinite) stream only emits a line once the next line begins."""
  stream = Stream(sio(), (GlogLine,))
  lines = read_all(stream, terminator=Stream.EOF)
  assert len(lines) == 3
  last_line = lines[-1]
  # does assembly of trailing non-GlogLines work properly?
  assert last_line.raw.startswith('I1101')
  assert TEST_GLOG_LINES[-len(last_line.raw):] == last_line.raw

  # test tailed logs
  writer = Compatibility.StringIO()
  stream = Stream(writer, (GlogLine,), infinite=True)
  assert stream.next() is None
  write_and_rewind(writer, lines[0].raw)
  assert stream.next() is None
  write_and_rewind(writer, '\n')

  # this is somewhat counterintuitive behavior -- we need to see two log lines in order
  # to print one, simply because otherwise we don't know if the current line is finished.
  # you could imagine a scenario, however, when you'd want (after a certain duration)
  # to print out whatever is in the buffers regardless.  this should probably be the
  # default behavior in infinite=True, but it will add a lot of complexity to the
  # implementation.
  assert stream.next() is None
  write_and_rewind(writer, lines[1].raw)
  assert stream.next() == lines[0]

  assert stream.next() is None
  write_and_rewind(writer, '\n')
  # Fixed: compare against None with `is`, not `==`.
  assert stream.next() is None
  write_and_rewind(writer, lines[2].raw)
  assert stream.next() == lines[1]
Ejemplo n.º 13
0
  def test_code(self):
    """A BUILD file's code object executes and exposes its top-level bindings."""
    build_content = 'lib = java_library(name="jake", age=42)'
    with safe_open(self.fullpath('BUILD.code'), 'w') as fp:
      fp.write(build_content)
    build_file = self.create_buildfile('BUILD.code')

    # Bind `java_library` to dict so the call simply captures its kwargs.
    exec_locals = Compatibility.exec_function(build_file.code(), {'java_library': dict})
    self.assertEqual(dict(name='jake', age=42), exec_locals.pop('lib', None))
Ejemplo n.º 14
0
def generate_coverage_config(target):
  """Build a coverage.py config for `target`, pointing the [html] report directory
  at <pants_distdir>/coverage/<buildfile dir>/<target name> (created if absent).

  :param target: the pants target under coverage.
  :returns: the populated ConfigParser.
  """
  cp = configparser.ConfigParser()
  # NOTE(review): readfp() is deprecated in favor of read_file() on Python 3;
  # kept for py2 compatibility here.
  cp.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
  cp.add_section('html')
  target_dir = os.path.join(Config.load().getdefault('pants_distdir'), 'coverage',
      os.path.dirname(target.address.buildfile.relpath), target.name)
  safe_mkdir(target_dir)
  cp.set('html', 'directory', target_dir)
  return cp
Ejemplo n.º 15
0
      def _parse():
        # Closure over the enclosing parse(): reads self, buildfile_family,
        # pants_context and the caller-supplied `globals` mapping.
        # Evaluates each sibling BUILD file from within its directory.
        start = os.path.abspath(os.curdir)
        try:
          os.chdir(self.buildfile.parent_path)
          for buildfile in buildfile_family:
            self.buildfile = buildfile
            # Fresh copy per file so bindings don't leak between siblings.
            eval_globals = copy.copy(pants_context)
            eval_globals.update({
              'ROOT_DIR': buildfile.root_dir,
              '__file__': buildfile.full_path,

              # TODO(John Sirois): kill PANTS_NEW and its usages when pants.new is rolled out
              'PANTS_NEW': False
            })
            eval_globals.update(globals)
            Compatibility.exec_function(buildfile.code(), eval_globals)
        finally:
          # Always restore the original working directory.
          os.chdir(start)
Ejemplo n.º 16
0
    def test_dump(self):
        props = OrderedDict()
        props['a'] = 1
        props['b'] = '''2
'''
        props['c'] = ' 3 : ='
        out = Compatibility.StringIO()
        Properties.dump(props, out)
        self.assertEquals('a=1\nb=2\\\n\nc=\\ 3\\ \\:\\ \\=\n', out.getvalue())
Ejemplo n.º 17
0
  def parse_build_file(self, build_file):
    """Capture Addressable instances from parsing `build_file`.

    Prepare a context for parsing, read a BUILD file from the filesystem, and return the
    Addressable instances generated by executing the code.

    :param build_file: the BUILD file to parse.
    :returns: dict mapping each parsed address to its Addressable.
    :raises BuildFileParser.TargetConflictException: if an address is defined more than once.
    """

    logger.debug("Parsing BUILD file {build_file}."
                 .format(build_file=build_file))

    try:
      build_file_code = build_file.code()
    except Exception:
      logger.exception("Error parsing {build_file}.".format(build_file=build_file))
      traceback.print_exc()
      raise

    parse_state = self._build_configuration.initialize_parse_state(build_file)
    try:
      Compatibility.exec_function(build_file_code, parse_state.parse_globals)
    except Exception:
      logger.exception("Error parsing {build_file}.".format(build_file=build_file))
      traceback.print_exc()
      raise

    address_map = {}
    for address, addressable in parse_state.registered_addressable_instances:
      logger.debug('Adding {addressable} to the BuildFileParser address map with {address}'
                   .format(addressable=addressable,
                           address=address))
      # Fixed: dropped the unused `conflicting_addressable` local that was
      # assigned here but never referenced.
      if address in address_map:
        raise BuildFileParser.TargetConflictException(
          "File {conflicting_file} defines address '{target_name}' more than once."
          .format(conflicting_file=address.build_file,
                  target_name=address.target_name))
      address_map[address] = addressable

    logger.debug("{build_file} produced the following Addressables:"
                 .format(build_file=build_file))
    for address, addressable in address_map.items():
      logger.debug("  * {address}: {addressable}"
                   .format(address=address,
                           addressable=addressable))
    return address_map
Ejemplo n.º 18
0
    def load_module(self, fullmodname):
        """PEP-302-compliant load_module() method.

    Args:
      fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.

    Returns:
      The module object constructed from the source code.

    Raises:
      SyntaxError if the module's source code is syntactically incorrect.
      ImportError if there was a problem accessing the source code.
      Whatever else can be raised by executing the module's source code.
    """
        with self._log_nested('entering load_module(%s)' % fullmodname,
                              at_level=3):
            submodname, is_package, fullpath, code = self._get_code(
                fullmodname)
            # Reuse an existing module object if a (possibly partial) import is in flight.
            mod = sys.modules.get(fullmodname)
            try:
                if mod is None:
                    # Register in sys.modules before exec so recursive imports resolve.
                    mod = sys.modules[fullmodname] = types.ModuleType(
                        fullmodname)
                mod.__loader__ = self
                mod.__file__ = fullpath
                mod.__name__ = fullmodname
                self._log('** __file__ = %s' % mod.__file__, at_level=4)
                self._log('** __name__ = %s' % mod.__name__, at_level=4)
                if is_package:
                    mod.__path__ = [os.path.dirname(mod.__file__)]
                    self._log('** __path__ = %s' % mod.__path__, at_level=4)
                Compatibility.exec_function(code, mod.__dict__)
            except Exception as e:
                # On failure, drop the half-initialized module per PEP 302.
                self._log('Caught exception: %s' % e)
                if fullmodname in sys.modules:
                    del sys.modules[fullmodname]
                raise
        self._log('exiting load_module(%s) => __file__ = %s, __name__ = %s' %
                  (fullmodname, mod.__file__, mod.__name__),
                  at_level=3)
        # We have to do this because of modules like _apipkg that rewrite sys.modules and
        # expect that to be what gets written into the global namespace.
        return sys.modules.get(fullmodname)
Ejemplo n.º 19
0
 def write(self, filename):
   """Write an executable pex to `filename`: hashbang line followed by the
   zipped environment chroot, assembled via a `filename~` scratch file."""
   scratch = filename + '~'
   env_chroot = self._env.chroot().dup()
   env_chroot.zip(scratch)
   with open(filename, "wb") as out:
     out.write(Compatibility.to_bytes('%s\n' % self._identity.hashbang()))
     with open(scratch, 'rb') as zipped:
       out.write(zipped.read())
   # Clean up the duplicated chroot and the scratch zip, then mark executable.
   env_chroot.delete()
   os.unlink(scratch)
   chmod_plus_x(filename)
Ejemplo n.º 20
0
    def _sources_hash(self, paths):
        """Build a SHA1 digest over every file under `paths`.

        Each file contributes its relative name followed by its bytes, so both
        renames and content changes alter the digest.

        :returns: the hashlib sha1 object (not hex-encoded).
        """
        digest = hashlib.sha1()

        for rel_name, abs_name in self._walk_paths(paths):
            with open(abs_name, "rb") as stream:
                digest.update(Compatibility.to_bytes(rel_name))
                digest.update(stream.read())

        return digest
Ejemplo n.º 21
0
  def _sources_hash(self, paths):
    """Return the hex SHA1 digest of all files under `paths`.

    Each file contributes its relative name followed by its bytes, so both
    renames and content changes alter the digest.
    """
    digest = hashlib.sha1()

    for rel_path, abs_path in self._walk_paths(paths):
      with open(abs_path, "rb") as stream:
        digest.update(Compatibility.to_bytes(rel_path))
        digest.update(stream.read())

    return digest.hexdigest()
Ejemplo n.º 22
0
    def _sources_hash(self, sha, paths):
        """Fold every file under `paths` into the caller-supplied `sha` digest.

        Each file contributes its relative name followed by its bytes.

        :returns: The files found under the given paths.
        """
        seen = []
        for rel_name, abs_name in self._walk_paths(paths):
            with open(abs_name, "rb") as stream:
                sha.update(Compatibility.to_bytes(rel_name))
                sha.update(stream.read())
            seen.append(abs_name)
        return seen
Ejemplo n.º 23
0
 def synthesize_thread_stacks():
   """Render the stacks of all live threads to a string.

   Returns '' when only the current thread is running or when the interpreter
   does not provide sys._current_frames.
   """
   threads = dict([(th.ident, th) for th in threading.enumerate()])
   ostr = Compatibility.StringIO()
   # _current_frames not yet implemented on pypy and not guaranteed anywhere but
   # cpython in practice.
   # Fixed: dict.values() is a non-indexable view on Python 3 -- materialize it
   # before indexing.
   if hasattr(sys, '_current_frames') and (len(sys._current_frames()) > 1 or
       list(sys._current_frames().values())[0] != inspect.currentframe()):
     # Multi-threaded
     ostr.write('\nAll threads:\n')
     for thread_id, stack in sys._current_frames().items():
       BasicExceptionHandler.print_stack(thread_id, threads[thread_id], stack, ostr, indent=2)
   return ostr.getvalue()
Ejemplo n.º 24
0
  def _sources_hash(self, sha, paths):
    """Fold every file under `paths` into the caller-supplied `sha` digest.

    Each file contributes its relative name followed by its bytes.

    :returns: The number of files found under the given paths.
    """
    count = 0
    for rel_path, abs_path in self._walk_paths(paths):
      with open(abs_path, "rb") as stream:
        sha.update(Compatibility.to_bytes(rel_path))
        sha.update(stream.read())
      count += 1
    return count
Ejemplo n.º 25
0
  def load_module(self, fullmodname):
    """PEP-302-compliant load_module() method.

    Args:
      fullmodname: The dot-separated full module name, e.g. 'django.core.mail'.

    Returns:
      The module object constructed from the source code.

    Raises:
      SyntaxError if the module's source code is syntactically incorrect.
      ImportError if there was a problem accessing the source code.
      Whatever else can be raised by executing the module's source code.
    """
    with self._log_nested('entering load_module(%s)' % fullmodname, at_level=3):
      submodname, is_package, fullpath, code = self._get_code(fullmodname)
      # Reuse an existing module object if a (possibly partial) import is in flight.
      mod = sys.modules.get(fullmodname)
      try:
        if mod is None:
          # Register in sys.modules before exec so recursive imports resolve.
          mod = sys.modules[fullmodname] = types.ModuleType(fullmodname)
        mod.__loader__ = self
        mod.__file__ = fullpath
        mod.__name__ = fullmodname
        self._log('** __file__ = %s' % mod.__file__, at_level=4)
        self._log('** __name__ = %s' % mod.__name__, at_level=4)
        if is_package:
          mod.__path__ = [os.path.dirname(mod.__file__)]
          self._log('** __path__ = %s' % mod.__path__, at_level=4)
        Compatibility.exec_function(code, mod.__dict__)
      except Exception as e:
        # On failure, drop the half-initialized module per PEP 302.
        self._log('Caught exception: %s' % e)
        if fullmodname in sys.modules:
          del sys.modules[fullmodname]
        raise
    self._log('exiting load_module(%s) => __file__ = %s, __name__ = %s' % (
      fullmodname, mod.__file__, mod.__name__), at_level=3)
    # We have to do this because of modules like _apipkg that rewrite sys.modules and
    # expect that to be what gets written into the global namespace.
    return sys.modules.get(fullmodname)
Ejemplo n.º 26
0
def generate_coverage_config(targets):
  """Build a coverage.py config for `targets`, pointing the [html] report directory
  under <pants_distdir>/coverage/ (created if absent).

  :param targets: pants targets; a single target reports under its buildfile
    dir/name, multiple targets under a synthesized readable identifier.
  :returns: the populated ConfigParser.
  """
  cp = configparser.ConfigParser()
  # NOTE(review): readfp() is deprecated in favor of read_file() on Python 3;
  # kept for py2 compatibility here.
  cp.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
  cp.add_section('html')
  if len(targets) == 1:
    target = targets[0]
    relpath = os.path.join(os.path.dirname(target.address.buildfile.relpath), target.name)
  else:
    relpath = Target.maybe_readable_identify(targets)
  target_dir = os.path.join(Config.load().getdefault('pants_distdir'), 'coverage', relpath)
  safe_mkdir(target_dir)
  cp.set('html', 'directory', target_dir)
  return cp
Ejemplo n.º 27
0
def test_tracing_filter():
    """Tracer honors its verbosity predicate when deciding what to emit."""
    # No predicate: everything is logged.
    out = Compatibility.StringIO()
    tracer = Tracer(output=out)
    tracer.log('hello world')
    assert out.getvalue() == 'hello world\n'

    # Threshold predicate: only messages with V >= 1 are logged.
    out = Compatibility.StringIO()
    tracer = Tracer(output=out, predicate=lambda verbosity: verbosity >= 1)
    tracer.log('hello world')
    assert out.getvalue() == ''
    tracer.log('hello world', V=1)
    assert out.getvalue() == 'hello world\n'
    tracer.log('ehrmagherd', V=2)
    assert out.getvalue() == 'hello world\nehrmagherd\n'

    # Parity predicate: only even verbosity levels pass.
    out = Compatibility.StringIO()
    tracer = Tracer(output=out, predicate=lambda verbosity: verbosity % 2 == 0)
    tracer.log('hello world', V=0)
    assert out.getvalue() == 'hello world\n'
    tracer.log('morf gorf', V=1)
    assert out.getvalue() == 'hello world\n'
    tracer.log('ehrmagherd', V=2)
    assert out.getvalue() == 'hello world\nehrmagherd\n'
Ejemplo n.º 28
0
  def default_globals(cls, config=None):
    """
    Has pants.*, but not file-specfic things like __file__
    If you want to add new imports to be available to all BUILD files, add a section to the config
    similar to:

      [parse]
      headers: ['from test import get_jar',]

    You may also need to add new roots to the sys.path. see _run in pants_exe.py
    """
    # Start from the class-level header snippets, optionally extended from config.
    header_strs = list(cls._strs_to_exec)
    if config:
      # TODO: This can be replaced once extensions are enabled with
      # https://github.com/pantsbuild/pants/issues/5
      header_strs.extend(config.getlist('parse', 'headers', default=[]))

    # Exec each snippet into a single shared namespace and hand that back.
    context = {}
    for header in header_strs:
      code_obj = compile(header, '<string>', 'exec')
      Compatibility.exec_function(code_obj, context)
    return context
Ejemplo n.º 29
0
    def default_globals(cls, config=None):
        """
    Has twitter.pants.*, but not file-specific things like __file__
    If you want to add new imports to be available to all BUILD files, add a section to the config
    similar to:

      [parse]
      headers: ['from test import get_jar',]

    You may also need to add new roots to the sys.path. see _run in pants_exe.py
    """
        # Start from the class-level header snippets, optionally extended from config.
        to_exec = list(cls._strs_to_exec)
        if config:
            # TODO: This can be replaced once extensions are enabled with
            # https://github.com/pantsbuild/pants/issues/5
            to_exec.extend(config.getlist('parse', 'headers', default=[]))

        # Exec each snippet into a single shared namespace.
        pants_context = {}
        for str_to_exec in to_exec:
            ast = compile(str_to_exec, '<string>', 'exec')
            Compatibility.exec_function(ast, pants_context)

        return pants_context
Ejemplo n.º 30
0
    def parse(self, **global_args):
        """The entry point to parsing of a BUILD file.

        Changes the working directory to the BUILD file directory and then evaluates the BUILD
        file with the ROOT_DIR and __file__ globals set in addition to any globals specified as
        kwargs.  As target methods are parsed they can examine the stack to find these globals
        and thus locate themselves for the purposes of finding files (see locate() and bind()).
        """

        if self.buildfile not in ParseContext._parsed:
            buildfile_family = tuple(self.buildfile.family())

            # Seed the shared evaluation context by exec'ing the configured header snippets.
            pants_context = {}
            for str_to_exec in self._strs_to_exec:
                ast = compile(str_to_exec, '<string>', 'exec')
                Compatibility.exec_function(ast, pants_context)

            with ParseContext.activate(self):
                start = os.path.abspath(os.curdir)
                try:
                    os.chdir(self.buildfile.parent_path)
                    for buildfile in buildfile_family:
                        # We may have traversed a sibling already, guard against re-parsing it.
                        if buildfile not in ParseContext._parsed:
                            ParseContext._parsed.add(buildfile)

                            # Fresh copy per file so bindings don't leak between siblings.
                            eval_globals = copy.copy(pants_context)
                            eval_globals.update({
                                'ROOT_DIR':
                                buildfile.root_dir,
                                '__file__':
                                buildfile.full_path,
                            })
                            eval_globals.update(global_args)
                            Compatibility.exec_function(
                                buildfile.code(), eval_globals)
                finally:
                    # Always restore the original working directory.
                    os.chdir(start)
Ejemplo n.º 31
0
 def _get_code(self, fullmodname):
   """Resolve a module name to its code object within this archive.

   Prefers unmarshaling the pre-compiled .pyc; falls back to compiling the .py
   source when the .pyc is missing or invalid.

   :returns: (submodname, is_package, fullpath, code) tuple.
   """
   submodname, is_package, relpath = self._get_info(fullmodname)
   relsplit, _ = os.path.split(relpath)
   fullpath = '%s%s%s' % (self.archive, os.sep, relpath)
   pyc = os.path.splitext(fullpath)[0] + '.pyc'
   try:
     # Fast path: unmarshal the cached .pyc.
     with timed('Unmarshaling %s' % pyc, at_level=2):
       pyc_object = CodeMarshaller.from_pyc(Compatibility.BytesIO(Nested.read(pyc)))
   except (Nested.FileNotFound, ValueError, CodeMarshaller.InvalidCode) as e:
     # Slow path: compile the source ourselves.
     with timed('Compiling %s because of %s' % (fullpath, e.__class__.__name__), at_level=2):
       py = Nested.read(fullpath)
       assert py is not None
       if Compatibility.PY3:
         py = py.decode('utf8')
       pyc_object = CodeMarshaller.from_py(py, fullpath)
   return submodname, is_package, fullpath, pyc_object.code
Ejemplo n.º 32
0
 def build(self, filename):
   """Freeze this builder and write the executable pex to `filename`.

   The pex is assembled at `filename~` (hashbang line plus the zipped chroot)
   and then renamed into place with the executable bit set.
   """
   self.freeze()
   try:
     # Clean up any scratch file left over from a previous failed build.
     os.unlink(filename + '~')
     self._logger.warn('Previous binary unexpectedly exists, cleaning: %s' % (filename + '~'))
   except OSError:
     # The expectation is that the file does not exist, so continue
     pass
   with open(filename + '~', 'ab') as pexfile:
     assert os.path.getsize(pexfile.name) == 0
     # TODO(wickman) Make this tunable
     pexfile.write(Compatibility.to_bytes('%s\n' % PythonIdentity.get().hashbang()))
   # Append the zipped chroot after the hashbang line.
   self._chroot.zip(filename + '~', mode='a')
   if os.path.exists(filename):
     os.unlink(filename)
   os.rename(filename + '~', filename)
   chmod_plus_x(filename)
Ejemplo n.º 33
0
 def build(self, filename):
   """Freeze this builder and write the executable pex to `filename`.

   The pex is assembled at `filename~` (hashbang line plus the zipped chroot)
   and then renamed into place with the executable bit set.
   """
   self.freeze()
   try:
     # Clean up any scratch file left over from a previous failed build.
     os.unlink(filename + '~')
     print('WARNING: Previous binary unexpectedly exists, cleaning: %s' % (filename + '~'))
   except OSError:
     # The expectation is that the file does not exist, so continue
     pass
   with open(filename + '~', 'ab') as pexfile:
     assert os.path.getsize(pexfile.name) == 0
     # TODO(wickman) Make this tunable
     pexfile.write(Compatibility.to_bytes('%s\n' % PythonIdentity.get().hashbang()))
   # Append the zipped chroot after the hashbang line.
   self._chroot.zip(filename + '~', mode='a')
   if os.path.exists(filename):
     os.unlink(filename)
   os.rename(filename + '~', filename)
   chmod_plus_x(filename)
Ejemplo n.º 34
0
  def test_download_listener(self):
    """DownloadListener tees fetched chunks into a file object while forwarding
    them to the wrapped listener (mox record/replay test)."""
    # Record phase: expect each chunk to be forwarded, accumulating the payload.
    downloaded = ''
    for chunk in self.expect_get('http://foo', chunk_size_bytes=1048576, timeout_secs=3600):
      self.listener.recv_chunk(chunk)
      downloaded += chunk

    self.listener.finished()
    self.response.close()

    self.mox.ReplayAll()

    # Replay phase: the wrapper must write exactly the fetched bytes into fp.
    with closing(Compatibility.StringIO()) as fp:
      self.fetcher.fetch('http://foo',
                         Fetcher.DownloadListener(fp).wrap(self.listener),
                         chunk_size=Amount(1, Data.MB),
                         timeout=Amount(1, Time.HOURS))
      self.assertEqual(downloaded, fp.getvalue())
Ejemplo n.º 35
0
def test_tracing_timed():
    """Exercise nested Tracer.timed() scopes against a fake clock and check the trace tree."""
    sio = Compatibility.StringIO()
    clock = ThreadedClock()
    final_trace = []

    # Intercept print_trace to capture the root trace node instead of rendering it.
    class PrintTraceInterceptor(Tracer):
        def print_trace(self, *args, **kw):
            final_trace.append(self._local.parent)

    # predicate rejects everything, so nothing should be written to sio.
    tracer = PrintTraceInterceptor(output=sio,
                                   clock=clock,
                                   predicate=lambda v: False)
    assert not hasattr(tracer._local, 'parent')

    # Build: hello(0..3) with children world 1(1..2) and world 2(2..3).
    with tracer.timed('hello'):
        clock.tick(1.0)
        with tracer.timed('world 1'):
            clock.tick(1.0)
        with tracer.timed('world 2'):
            clock.tick(1.0)

    # Only the outermost scope triggers print_trace, yielding one root node.
    assert len(final_trace) == 1
    final_trace = final_trace[0]
    assert final_trace._start == 0
    assert final_trace._stop == 3
    assert final_trace.duration() == 3
    assert final_trace.msg == 'hello'
    assert len(final_trace.children) == 2
    child = final_trace.children[0]
    assert child._start == 1
    assert child._stop == 2
    assert child.parent is final_trace
    assert child.msg == 'world 1'
    child = final_trace.children[1]
    assert child._start == 2
    assert child._stop == 3
    assert child.parent is final_trace
    assert child.msg == 'world 2'

    # should not log if verbosity low
    assert sio.getvalue() == ''
Ejemplo n.º 36
0
 def iter_tokens(cls, blob):
     """Tokenize `blob` as Python source.

     :param blob: string containing Python file contents
     :return: iterator over tokens produced by the tokenize module
     """
     readline = Compatibility.StringIO(blob).readline
     return tokenize.generate_tokens(readline)
Ejemplo n.º 37
0
  def parse_build_file(self, build_file):
    """Capture TargetProxies from parsing `build_file`.

    Prepare a context for parsing, read a BUILD file from the filesystem, and record the
    TargetProxies generated by executing the code.

    :param build_file: the BuildFile to parse; a no-op if it was already parsed.
    """

    if build_file in self._added_build_files:
      logger.debug('BuildFile {build_file} has already been parsed.'
                   .format(build_file=build_file))
      return

    logger.debug("Parsing BUILD file {build_file}."
                 .format(build_file=build_file))

    parse_context = {}

    # TODO(pl): Don't inject __file__ into the context.  BUILD files should not be aware
    # of their location on the filesystem.
    parse_context['__file__'] = build_file.full_path

    parse_context.update(self._exposed_objects)
    # Partial utils are bound lazily to this BUILD file's spec_path; applicative
    # utils are invoked with it eagerly.
    parse_context.update(
      (key, partial(util, rel_path=build_file.spec_path)) for
      key, util in self._partial_path_relative_utils.items()
    )
    parse_context.update(
      (key, util(rel_path=build_file.spec_path)) for
      key, util in self._applicative_path_relative_utils.items()
    )
    registered_target_proxies = set()
    parse_context.update(
      (alias, TargetCallProxy(target_type=target_type,
                              build_file=build_file,
                              registered_target_proxies=registered_target_proxies)) for
      alias, target_type in self._target_alias_map.items()
    )

    try:
      build_file_code = build_file.code()
    except Exception:  # Narrowed from bare except so SystemExit/KeyboardInterrupt propagate.
      logger.exception("Error parsing {build_file}."
                       .format(build_file=build_file))
      traceback.print_exc()
      raise

    try:
      Compatibility.exec_function(build_file_code, parse_context)
    except Exception:  # Narrowed from bare except so SystemExit/KeyboardInterrupt propagate.
      logger.exception("Error running {build_file}."
                       .format(build_file=build_file))
      traceback.print_exc()
      raise

    for target_proxy in registered_target_proxies:
      logger.debug('Adding {target_proxy} to the proxy build graph with {address}'
                   .format(target_proxy=target_proxy,
                           address=target_proxy.address))

      assert target_proxy.address not in self._target_proxy_by_address, (
        '{address} already in BuildGraph._target_proxy_by_address even though this BUILD file has'
        ' not yet been added to the BuildGraph.  The target type is: {target_type}'
        .format(address=target_proxy.address,
                target_type=target_proxy.target_type))

      assert target_proxy.address not in self.addresses_by_build_file[build_file], (
        '{address} has already been associated with {build_file} in the build graph.'
        .format(address=target_proxy.address,
                build_file=build_file))

      self._target_proxy_by_address[target_proxy.address] = target_proxy
      self.addresses_by_build_file[build_file].add(target_proxy.address)
      self._target_proxies_by_build_file[build_file].add(target_proxy)
    self._added_build_files.add(build_file)

    logger.debug("{build_file} produced the following TargetProxies:"
                 .format(build_file=build_file))
    for target_proxy in registered_target_proxies:
      logger.debug("  * {target_proxy}".format(target_proxy=target_proxy))
Ejemplo n.º 38
0
def sio():
  """Return a fresh StringIO preloaded with the glog test fixture lines."""
  stream = Compatibility.StringIO(TEST_GLOG_LINES)
  return stream
Ejemplo n.º 39
0
    def parse_build_file(self, build_file):
        """Capture TargetProxies from parsing `build_file`.

        Prepare a context for parsing, read a BUILD file from the filesystem, and record
        the TargetProxies generated by executing the code.

        :param build_file: the BuildFile to parse; a no-op if it was already parsed.
        """

        if build_file in self._added_build_files:
            logger.debug(
                'BuildFile {build_file} has already been parsed.'.format(
                    build_file=build_file))
            return

        logger.debug(
            "Parsing BUILD file {build_file}.".format(build_file=build_file))

        parse_context = {}

        # TODO(pl): Don't inject __file__ into the context.  BUILD files should not be aware
        # of their location on the filesystem.
        parse_context['__file__'] = build_file.full_path

        parse_context.update(self._exposed_objects)
        # Partial utils bind lazily to this file's spec_path; applicative utils are
        # invoked with it eagerly.
        parse_context.update(
            (key, partial(util, rel_path=build_file.spec_path))
            for key, util in self._partial_path_relative_utils.items())
        parse_context.update(
            (key, util(rel_path=build_file.spec_path))
            for key, util in self._applicative_path_relative_utils.items())
        registered_target_proxies = set()
        parse_context.update(
            (alias,
             TargetCallProxy(
                 target_type=target_type,
                 build_file=build_file,
                 registered_target_proxies=registered_target_proxies))
            for alias, target_type in self._target_alias_map.items())

        try:
            build_file_code = build_file.code()
        except Exception:  # Narrowed from bare except so SystemExit/KeyboardInterrupt propagate.
            logger.exception(
                "Error parsing {build_file}.".format(build_file=build_file))
            traceback.print_exc()
            raise

        try:
            Compatibility.exec_function(build_file_code, parse_context)
        except Exception:  # Narrowed from bare except so SystemExit/KeyboardInterrupt propagate.
            logger.exception(
                "Error running {build_file}.".format(build_file=build_file))
            traceback.print_exc()
            raise

        for target_proxy in registered_target_proxies:
            logger.debug(
                'Adding {target_proxy} to the proxy build graph with {address}'
                .format(target_proxy=target_proxy,
                        address=target_proxy.address))

            assert target_proxy.address not in self._target_proxy_by_address, (
                '{address} already in BuildGraph._target_proxy_by_address even though this BUILD file has'
                ' not yet been added to the BuildGraph.  The target type is: {target_type}'
                .format(address=target_proxy.address,
                        target_type=target_proxy.target_type))

            assert target_proxy.address not in self.addresses_by_build_file[
                build_file], (
                    '{address} has already been associated with {build_file} in the build graph.'
                    .format(address=target_proxy.address,
                            build_file=build_file))

            self._target_proxy_by_address[target_proxy.address] = target_proxy
            self.addresses_by_build_file[build_file].add(target_proxy.address)
            self._target_proxies_by_build_file[build_file].add(target_proxy)
        self._added_build_files.add(build_file)

        logger.debug(
            "{build_file} produced the following TargetProxies:".format(
                build_file=build_file))
        for target_proxy in registered_target_proxies:
            logger.debug(
                "  * {target_proxy}".format(target_proxy=target_proxy))
Ejemplo n.º 40
0
    def parse_build_file(self, build_file):
        """Capture TargetProxies from parsing `build_file`.

        Prepare a context for parsing, read a BUILD file from the filesystem, and record
        the TargetProxies generated by executing the code.

        :param build_file: the BuildFile to parse; a no-op if it was already parsed.
        :raises BuildFileParser.SiblingConflictException: if a sibling BUILD file already
            defines a target with the same name.
        :raises BuildFileParser.TargetConflictException: if this file defines the same
            target name twice.
        """

        if build_file in self._added_build_files:
            logger.debug(
                'BuildFile {build_file} has already been parsed.'.format(
                    build_file=build_file))
            return

        logger.debug(
            "Parsing BUILD file {build_file}.".format(build_file=build_file))

        parse_context = {}

        # TODO(pl): Don't inject __file__ into the context.  BUILD files should not be aware
        # of their location on the filesystem.
        parse_context['__file__'] = build_file.full_path

        parse_context.update(self._exposed_objects)
        # Partial utils bind lazily to this file's spec_path; applicative utils are
        # invoked with it eagerly.
        parse_context.update(
            (key, partial(util, rel_path=build_file.spec_path))
            for key, util in self._partial_path_relative_utils.items())
        parse_context.update(
            (key, util(rel_path=build_file.spec_path))
            for key, util in self._applicative_path_relative_utils.items())
        registered_target_proxies = set()
        # Each target alias becomes a callable that records a TargetProxy on invocation.
        parse_context.update(
            (alias,
             TargetCallProxy(
                 target_type=target_type,
                 build_file=build_file,
                 registered_target_proxies=registered_target_proxies))
            for alias, target_type in self._target_alias_map.items())

        # Creation utils receive the fully-populated context as their alias map.
        for key, func in self._target_creation_utils.items():
            parse_context.update({key: partial(func, alias_map=parse_context)})

        try:
            build_file_code = build_file.code()
        except Exception:
            logger.exception(
                "Error parsing {build_file}.".format(build_file=build_file))
            traceback.print_exc()
            raise

        try:
            Compatibility.exec_function(build_file_code, parse_context)
        except Exception:
            logger.exception(
                "Error running {build_file}.".format(build_file=build_file))
            traceback.print_exc()
            raise

        for target_proxy in registered_target_proxies:
            logger.debug(
                'Adding {target_proxy} to the proxy build graph with {address}'
                .format(target_proxy=target_proxy,
                        address=target_proxy.address))

            # Distinguish a clash with a sibling BUILD file from a duplicate
            # definition within the same file.
            if target_proxy.address in self._target_proxy_by_address:
                conflicting_target = self._target_proxy_by_address[
                    target_proxy.address]
                if (conflicting_target.address.build_file !=
                        target_proxy.address.build_file):
                    raise BuildFileParser.SiblingConflictException(
                        "Both {conflicting_file} and {target_file} define the same target '{target_name}'"
                        .format(conflicting_file=conflicting_target.address.
                                build_file,
                                target_file=target_proxy.address.build_file,
                                target_name=conflicting_target.address.
                                target_name))
                raise BuildFileParser.TargetConflictException(
                    "File {conflicting_file} defines target '{target_name}' more than once."
                    .format(
                        conflicting_file=conflicting_target.address.build_file,
                        target_name=conflicting_target.address.target_name))

            assert target_proxy.address not in self.addresses_by_build_file[
                build_file], (
                    '{address} has already been associated with {build_file} in the build graph.'
                    .format(address=target_proxy.address,
                            build_file=build_file))

            self._target_proxy_by_address[target_proxy.address] = target_proxy
            self.addresses_by_build_file[build_file].add(target_proxy.address)
            self._target_proxies_by_build_file[build_file].add(target_proxy)
        self._added_build_files.add(build_file)

        logger.debug(
            "{build_file} produced the following TargetProxies:".format(
                build_file=build_file))
        for target_proxy in registered_target_proxies:
            logger.debug(
                "  * {target_proxy}".format(target_proxy=target_proxy))
Ejemplo n.º 41
0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import code

from twitter.common.lang import Compatibility

from apache.aurora.config.loader import AuroraConfigLoader

# Launch an interactive REPL seeded by evaluating the default Aurora config schema
# into this module's globals.
# NOTE(review): exec_function's return value is passed as `local` -- confirm it
# returns the populated namespace rather than None.
code.interact('Mesos Config REPL',
    local=Compatibility.exec_function(AuroraConfigLoader.DEFAULT_SCHEMA, globals()))
Ejemplo n.º 42
0
    def parse(self, **globalargs):
        """The entry point to parsing of a BUILD file.

        Evaluates this BUILD file and each of its siblings (its "family") with
        ROOT_DIR, __file__, glob helpers, source_root and bundle bound into the
        exec globals, plus any extra globals supplied as keyword arguments.

        See locate().
        """
        if self.buildfile not in ParseContext._parsed:
            buildfile_family = tuple(self.buildfile.family())

            pants_context = self.default_globals(Config.load())

            with ParseContext.activate(self):
                for buildfile in buildfile_family:
                    self._active_buildfile = buildfile
                    # We may have traversed a sibling already, guard against re-parsing it.
                    if buildfile not in ParseContext._parsed:
                        ParseContext._parsed.add(buildfile)

                        buildfile_dir = os.path.dirname(buildfile.full_path)

                        # TODO(John Sirois): XXX imports are done here to prevent a cycles
                        from twitter.pants.targets.jvm_binary import Bundle
                        from twitter.pants.targets.sources import SourceRoot

                        # Bundle whose base is implicitly this BUILD file's directory.
                        class RelativeBundle(Bundle):
                            def __init__(self, mapper=None, relative_to=None):
                                super(RelativeBundle,
                                      self).__init__(base=buildfile_dir,
                                                     mapper=mapper,
                                                     relative_to=relative_to)

                        # TODO(John Sirois): This is not build-dictionary friendly - rework SourceRoot to allow
                        # allow for doc of both register (as source_root) and source_root.here(*types).
                        class RelativeSourceRoot(object):
                            @staticmethod
                            def here(*allowed_target_types):
                                """Registers the cwd as a source root for the given target types."""
                                SourceRoot.register(buildfile_dir,
                                                    *allowed_target_types)

                            def __init__(self, basedir, *allowed_target_types):
                                SourceRoot.register(
                                    os.path.join(buildfile_dir, basedir),
                                    *allowed_target_types)

                        eval_globals = copy.copy(pants_context)
                        eval_globals.update({
                            'ROOT_DIR':
                            buildfile.root_dir,
                            '__file__':
                            buildfile.full_path,
                            'globs':
                            partial(Fileset.globs, root=buildfile_dir),
                            'rglobs':
                            partial(Fileset.rglobs, root=buildfile_dir),
                            'zglobs':
                            partial(Fileset.zglobs, root=buildfile_dir),
                            'source_root':
                            RelativeSourceRoot,
                            'bundle':
                            RelativeBundle
                        })
                        eval_globals.update(globalargs)
                        Compatibility.exec_function(buildfile.code(),
                                                    eval_globals)
Ejemplo n.º 43
0
 def format(exctype, value, tb):
   """Render an uncaught exception plus all thread stacks as a single string."""
   parts = ['Uncaught exception:\n']
   parts.append(''.join(traceback.format_exception(exctype, value, tb)))
   parts.append(BasicExceptionHandler.synthesize_thread_stacks())
   return ''.join(parts)
Ejemplo n.º 44
0
  def parse_build_file(self, build_file):
    """Capture TargetProxies from parsing `build_file`.

    Prepare a context for parsing, read a BUILD file from the filesystem, and record the
    TargetProxies generated by executing the code.

    :param build_file: the BuildFile to parse; a no-op if it was already parsed.
    :returns: the set of TargetProxies registered while executing the BUILD file.
    :raises BuildFileParser.SiblingConflictException: if a sibling BUILD file already
        defines a target with the same name.
    :raises BuildFileParser.TargetConflictException: if this file defines the same
        target name twice.
    """

    if build_file in self._added_build_files:
      logger.debug('BuildFile {build_file} has already been parsed.'
                   .format(build_file=build_file))
      return

    logger.debug("Parsing BUILD file {build_file}.".format(build_file=build_file))

    try:
      build_file_code = build_file.code()
    except Exception:
      logger.exception("Error parsing {build_file}.".format(build_file=build_file))
      traceback.print_exc()
      raise

    parse_state = self._build_configuration.initialize_parse_state(build_file)
    try:
      Compatibility.exec_function(build_file_code, parse_state.parse_globals)
    except Exception:
      # Fixed message: this phase executes (runs) already-compiled code; it previously
      # logged "Error parsing", duplicating the compile-phase message above.
      logger.exception("Error running {build_file}.".format(build_file=build_file))
      traceback.print_exc()
      raise

    for target_proxy in parse_state.registered_target_proxies:
      logger.debug('Adding {target_proxy} to the proxy build graph with {address}'
                   .format(target_proxy=target_proxy,
                           address=target_proxy.address))

      # Distinguish a clash with a sibling BUILD file from a duplicate definition
      # within the same file.
      if target_proxy.address in self._target_proxy_by_address:
        conflicting_target = self._target_proxy_by_address[target_proxy.address]
        if conflicting_target.address.build_file != target_proxy.address.build_file:
          raise BuildFileParser.SiblingConflictException(
              "Both {conflicting_file} and {target_file} define the same target '{target_name}'"
              .format(conflicting_file=conflicting_target.address.build_file,
                      target_file=target_proxy.address.build_file,
                      target_name=conflicting_target.address.target_name))
        raise BuildFileParser.TargetConflictException(
            "File {conflicting_file} defines target '{target_name}' more than once."
            .format(conflicting_file=conflicting_target.address.build_file,
                    target_name=conflicting_target.address.target_name))

      assert target_proxy.address not in self.addresses_by_build_file[build_file], (
          '{address} has already been associated with {build_file} in the build graph.'
          .format(address=target_proxy.address,
                  build_file=build_file))

      self._target_proxy_by_address[target_proxy.address] = target_proxy
      self.addresses_by_build_file[build_file].add(target_proxy.address)
      self._target_proxies_by_build_file[build_file].add(target_proxy)
    self._added_build_files.add(build_file)

    logger.debug("{build_file} produced the following TargetProxies:"
                 .format(build_file=build_file))
    for target_proxy in parse_state.registered_target_proxies:
      logger.debug("  * {target_proxy}".format(target_proxy=target_proxy))

    return parse_state.registered_target_proxies
Ejemplo n.º 45
0
  def parse_build_file(self, build_file):
    """Capture Addressable instances from parsing `build_file`.

    Prepare a context for parsing, read a BUILD file from the filesystem, and return the
    Addressable instances generated by executing the code.

    :param build_file: the BuildFile to parse.
    :returns: dict mapping each registered address to its Addressable.
    :raises self.ParseError: on a compile failure of the BUILD file source.
    :raises self.ExecuteError: on a failure while executing the compiled code.
    :raises self.AddressableConflictException: if an address is defined twice.
    """

    def _format_context_msg(lineno, offset, error_type, message):
      """Show the line of the BUILD file that has the error along with a few line of context"""
      with open(build_file.full_path, "r") as build_contents:
        context = "Error parsing {path}:\n".format(path=build_file.full_path)
        curr_lineno = 0
        for line in build_contents.readlines():
          curr_lineno += 1
          # '*' marks the offending line in the rendered window.
          if curr_lineno == lineno:
            highlight = '*'
          else:
            highlight = ' '
          # Render a window of up to 3 lines before and after the error line.
          if curr_lineno >= lineno - 3:
            context += "{highlight}{curr_lineno:4d}: {line}".format(
              highlight=highlight, line=line, curr_lineno=curr_lineno)
            # Point a caret at the column reported by the SyntaxError, if any.
            if offset and lineno == curr_lineno:
              context += "       {caret:>{width}} {error_type}: {message}\n\n" \
                .format(caret="^", width=int(offset), error_type=error_type,
                        message=message)
          if curr_lineno > lineno + 3:
            break
        return context

    logger.debug("Parsing BUILD file {build_file}."
                 .format(build_file=build_file))

    try:
      build_file_code = build_file.code()
    except SyntaxError as e:
      # Syntax errors carry a line/column, so show the offending source in context.
      raise self.ParseError(_format_context_msg(e.lineno, e.offset, e.__class__.__name__, e))
    except Exception as e:
        raise self.ParseError("{error_type}: {message}\n while parsing BUILD file {build_file}"
                              .format(error_type=e.__class__.__name__,
                                      message=e, build_file=build_file))

    parse_state = self._build_configuration.initialize_parse_state(build_file)
    try:
      Compatibility.exec_function(build_file_code, parse_state.parse_globals)
    except Exception as e:
      raise self.ExecuteError("{message}\n while executing BUILD file {build_file}"
                              .format(message=e, build_file=build_file))

    address_map = {}
    for address, addressable in parse_state.registered_addressable_instances:
      logger.debug('Adding {addressable} to the BuildFileParser address map with {address}'
                   .format(addressable=addressable,
                           address=address))
      if address in address_map:
        raise self.AddressableConflictException(
          "File {conflicting_file} defines address '{target_name}' more than once."
          .format(conflicting_file=address.build_file,
                  target_name=address.target_name))
      address_map[address] = addressable

    logger.debug("{build_file} produced the following Addressables:"
                 .format(build_file=build_file))
    for address, addressable in address_map.items():
      logger.debug("  * {address}: {addressable}"
                   .format(address=address,
                           addressable=addressable))
    return address_map
Ejemplo n.º 46
0
 def to_pyc(self):
     """Serialize this code object in .pyc layout: magic, timestamp, marshalled code."""
     header = struct.pack('I', CodeMarshaller.MAGIC) + struct.pack('I', self._stamp)
     return header + marshal.dumps(self._code)
Ejemplo n.º 47
0
    def parse_build_file(self, build_file):
        """Capture Addressable instances from parsing `build_file`.

        Prepare a context for parsing, read a BUILD file from the filesystem, and
        return the Addressable instances generated by executing the code.

        :param build_file: the BuildFile to parse.
        :returns: dict mapping each registered address to its Addressable.
        :raises self.ParseError: on a compile failure of the BUILD file source.
        :raises self.ExecuteError: on a failure while executing the compiled code.
        :raises self.AddressableConflictException: if an address is defined twice.
        """
        def _format_context_msg(lineno, offset, error_type, message):
            """Show the line of the BUILD file that has the error along with a few line of context"""
            with open(build_file.full_path, "r") as build_contents:
                context = "Error parsing {path}:\n".format(
                    path=build_file.full_path)
                curr_lineno = 0
                for line in build_contents.readlines():
                    curr_lineno += 1
                    # '*' marks the offending line in the rendered window.
                    if curr_lineno == lineno:
                        highlight = '*'
                    else:
                        highlight = ' '
                    # Render a window of up to 3 lines before and after the error line.
                    if curr_lineno >= lineno - 3:
                        context += "{highlight}{curr_lineno:4d}: {line}".format(
                            highlight=highlight,
                            line=line,
                            curr_lineno=curr_lineno)
                        # Point a caret at the column reported by the SyntaxError, if any.
                        if offset and lineno == curr_lineno:
                            context += "       {caret:>{width}} {error_type}: {message}\n\n" \
                              .format(caret="^", width=int(offset), error_type=error_type,
                                      message=message)
                    if curr_lineno > lineno + 3:
                        break
                return context

        logger.debug(
            "Parsing BUILD file {build_file}.".format(build_file=build_file))

        try:
            build_file_code = build_file.code()
        except SyntaxError as e:
            # Syntax errors carry a line/column, so show the offending source in context.
            raise self.ParseError(
                _format_context_msg(e.lineno, e.offset, e.__class__.__name__,
                                    e))
        except Exception as e:
            raise self.ParseError(
                "{error_type}: {message}\n while parsing BUILD file {build_file}"
                .format(error_type=e.__class__.__name__,
                        message=e,
                        build_file=build_file))

        parse_state = self._build_configuration.initialize_parse_state(
            build_file)
        try:
            Compatibility.exec_function(build_file_code,
                                        parse_state.parse_globals)
        except Exception as e:
            raise self.ExecuteError(
                "{message}\n while executing BUILD file {build_file}".format(
                    message=e, build_file=build_file))

        address_map = {}
        for address, addressable in parse_state.registered_addressable_instances:
            logger.debug(
                'Adding {addressable} to the BuildFileParser address map with {address}'
                .format(addressable=addressable, address=address))
            if address in address_map:
                raise self.AddressableConflictException(
                    "File {conflicting_file} defines address '{target_name}' more than once."
                    .format(conflicting_file=address.build_file,
                            target_name=address.target_name))
            address_map[address] = addressable

        logger.debug(
            "{build_file} produced the following Addressables:".format(
                build_file=build_file))
        for address, addressable in address_map.items():
            logger.debug("  * {address}: {addressable}".format(
                address=address, addressable=addressable))
        return address_map
Ejemplo n.º 48
0
  def parse_build_file(self, build_file):
    """Capture TargetProxies from parsing `build_file`.

    Prepare a context for parsing, read a BUILD file from the filesystem, and record the
    TargetProxies generated by executing the code.

    :param build_file: the BuildFile to parse; a no-op if it was already parsed.
    :raises BuildFileParser.SiblingConflictException: if a sibling BUILD file already
        defines a target with the same name.
    :raises BuildFileParser.TargetConflictException: if this file defines the same
        target name twice.
    """

    if build_file in self._added_build_files:
      logger.debug('BuildFile {build_file} has already been parsed.'
                   .format(build_file=build_file))
      return

    logger.debug("Parsing BUILD file {build_file}."
                 .format(build_file=build_file))

    parse_context = {}

    # TODO(pl): Don't inject __file__ into the context.  BUILD files should not be aware
    # of their location on the filesystem.
    parse_context['__file__'] = build_file.full_path

    parse_context.update(self._exposed_objects)
    # Partial utils bind lazily to this file's spec_path; applicative utils are
    # invoked with it eagerly.
    parse_context.update(
      (key, partial(util, rel_path=build_file.spec_path)) for
      key, util in self._partial_path_relative_utils.items()
    )
    parse_context.update(
      (key, util(rel_path=build_file.spec_path)) for
      key, util in self._applicative_path_relative_utils.items()
    )
    registered_target_proxies = set()
    # Each target alias becomes a callable that records a TargetProxy on invocation.
    parse_context.update(
      (alias, TargetCallProxy(target_type=target_type,
                              build_file=build_file,
                              registered_target_proxies=registered_target_proxies)) for
      alias, target_type in self._target_alias_map.items()
    )

    # Creation utils receive the fully-populated context as their alias map.
    for key, func in self._target_creation_utils.items():
      parse_context.update({key: partial(func, alias_map=parse_context)})

    try:
      build_file_code = build_file.code()
    except Exception:
      logger.exception("Error parsing {build_file}."
                       .format(build_file=build_file))
      traceback.print_exc()
      raise

    try:
      Compatibility.exec_function(build_file_code, parse_context)
    except Exception:
      logger.exception("Error running {build_file}."
                       .format(build_file=build_file))
      traceback.print_exc()
      raise

    for target_proxy in registered_target_proxies:
      logger.debug('Adding {target_proxy} to the proxy build graph with {address}'
                   .format(target_proxy=target_proxy,
                           address=target_proxy.address))

      # Distinguish a clash with a sibling BUILD file from a duplicate definition
      # within the same file.
      if target_proxy.address in self._target_proxy_by_address:
        conflicting_target = self._target_proxy_by_address[target_proxy.address]
        if (conflicting_target.address.build_file != target_proxy.address.build_file):
          raise BuildFileParser.SiblingConflictException(
            "Both {conflicting_file} and {target_file} define the same target '{target_name}'"
            .format(conflicting_file=conflicting_target.address.build_file,
                    target_file=target_proxy.address.build_file,
                    target_name=conflicting_target.address.target_name))
        raise BuildFileParser.TargetConflictException(
          "File {conflicting_file} defines target '{target_name}' more than once."
          .format(conflicting_file=conflicting_target.address.build_file,
                  target_name=conflicting_target.address.target_name))

      assert target_proxy.address not in self.addresses_by_build_file[build_file], (
        '{address} has already been associated with {build_file} in the build graph.'
        .format(address=target_proxy.address,
                build_file=build_file))

      self._target_proxy_by_address[target_proxy.address] = target_proxy
      self.addresses_by_build_file[build_file].add(target_proxy.address)
      self._target_proxies_by_build_file[build_file].add(target_proxy)
    self._added_build_files.add(build_file)

    logger.debug("{build_file} produced the following TargetProxies:"
                 .format(build_file=build_file))
    for target_proxy in registered_target_proxies:
      logger.debug("  * {target_proxy}".format(target_proxy=target_proxy))