Example #1
 def __init__(self, context):
     self.context = context
     self._cache_key_generator = CacheKeyGenerator()
     # TODO: Shared, remote build cache.
     self._artifact_cache = ArtifactCache(
         context.config.get('tasks', 'artifact_cache'))
     self._build_invalidator_dir = os.path.join(
         context.config.get('tasks', 'build_invalidator'),
         self.product_type())
Example #2
    def __init__(self, target, root_dir, extra_targets=None, builder=None):
        self._config = Config.load()

        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        artifact_cache_root = \
          os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
        self._artifact_cache = ArtifactCache(artifact_cache_root)
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = PythonResolver([self._target] + self._extra_targets)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())
        self._platforms = (Platform.current(), )
        self._pythons = (sys.version[:3], )

        # TODO(wickman) Should this be in the binary builder?
        if isinstance(self._target, PythonBinary):
            self._platforms = self._target._platforms
            self._pythons = self._target._interpreters
Example #3
  def __init__(self, target, root_dir, extra_targets=None, builder=None):
    self._config = Config.load()

    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    artifact_cache_root = \
      os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
    self._artifact_cache = ArtifactCache(artifact_cache_root)
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = PythonResolver([self._target] + self._extra_targets)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())
    self._platforms = (Platform.current(),)
    self._pythons = (sys.version[:3],)

    # TODO(wickman) Should this be in the binary builder?
    if isinstance(self._target, PythonBinary):
      self._platforms = self._target._platforms
      self._pythons = self._target._interpreters
Example #4
class Task(object):
    @classmethod
    def setup_parser(cls, option_group, args, mkflag):
        """
      Subclasses can add flags to the pants command line using the given option group.  Flag names
      should be created with mkflag([name]) to ensure flags are properly namespaced amongst other
      tasks.
    """

    def __init__(self, context):
        self.context = context
        self._cache_key_generator = CacheKeyGenerator()
        # TODO: Shared, remote build cache.
        self._artifact_cache = ArtifactCache(
            context.config.get('tasks', 'artifact_cache'))
        self._build_invalidator_dir = os.path.join(
            context.config.get('tasks', 'build_invalidator'),
            self.product_type())

    def product_type(self):
        """
      By default, each task is considered as creating a unique product type.
      Subclasses can override this to specify a shared product type, e.g., 'classes'.

      Tasks with the same product type can invalidate each other's targets, e.g., if a ScalaLibrary
      depends on a JavaLibrary, a change to the JavaLibrary will invalidate the ScalaLibrary because
      they both have the same product type.
    """
        return self.__class__.__name__

    def execute(self, targets):
        """
      Executes this task against the given targets which may be a subset of the current context
      targets.
    """

    def invalidate_for(self):
        """
      Subclasses can override and return an object that should be checked for changes when
      managing target invalidation.  If the pickled form of returned object changes
      between runs all targets will be invalidated.
    """
        return None

    def invalidate_for_files(self):
        """
      Subclasses can override and return a list of full paths to extra, non-source files that should
      be checked for changes when managing target invalidation. This is useful for tracking
      changes to pre-built build tools, e.g., the thrift compiler.
    """
        return []

    @contextmanager
    def invalidated(self,
                    targets,
                    only_buildfiles=False,
                    invalidate_dependants=False):
        """
      Checks targets for invalidation.

      Yields the result to a with block. If no exceptions are thrown by work in the block, the
      cache is updated for the targets.

      :targets The targets to check for changes.
      :only_buildfiles If True, then just the target's BUILD files are checked for changes.
      :invalidate_dependants If True then any targets depending on changed targets are invalidated.
      :returns: an InvalidationResult reflecting the invalidated targets.
    """
        # Gather extra invalidation data: the invalidate_for() payload plus a digest of each extra file.
        extra_data = []
        extra_data.append(self.invalidate_for())

        for f in self.invalidate_for_files():
            sha = hashlib.sha1()
            with open(f, "rb") as fd:
                sha.update(fd.read())
            extra_data.append(sha.hexdigest())

        cache_manager = CacheManager(self._cache_key_generator,
                                     self._build_invalidator_dir, targets,
                                     invalidate_dependants, extra_data,
                                     only_buildfiles)

        # Check for directly changed targets.
        all_versioned_targets = cache_manager.check(targets)
        invalidation_result = InvalidationResult(cache_manager,
                                                 all_versioned_targets)
        num_invalid_targets = len(invalidation_result.invalid_targets())

        # Do some reporting.
        if num_invalid_targets > 0:
            num_files = reduce(lambda x, y: x + y, [
                vt.cache_key.num_sources
                for vt in all_versioned_targets if not vt.valid
            ], 0)
            self.context.log.info(
                'Operating on %d files in %d invalidated targets' %
                (num_files, num_invalid_targets))

        # Yield the result, and then update the cache.
        if num_invalid_targets > 0:
            self.context.log.debug('Invalidated targets %s' %
                                   invalidation_result.invalid_targets())
        yield invalidation_result
        for vt in invalidation_result.invalid_versioned_targets():
            cache_manager.update(vt.cache_key)

    @contextmanager
    def check_artifact_cache(self, versioned_targets, build_artifacts,
                             artifact_root):
        """
      See if we have required artifacts in the cache.

      If we do (and reading from the artifact cache is enabled) then we copy the artifacts from the cache.
      If we don't (and writing to the artifact cache is enabled) then we will copy the artifacts into
      the cache when the context is exited.

      Therefore the usage idiom is as follows:

      with self.check_artifact_cache(...) as build:
        if build:
          ... build the necessary artifacts ...

      :versioned_targets a VersionedTargetSet representing a specific version of a set of targets.
      :build_artifacts a list of paths to which the artifacts will be written.
      :artifact_root If not None, the artifact paths will be cached relative to this dir.
      :returns: True if the caller must build the artifacts, False otherwise.
    """
        artifact_key = versioned_targets.cache_key
        targets = versioned_targets.targets
        if self.context.options.read_from_artifact_cache and self._artifact_cache.has(
                artifact_key):
            self.context.log.info('Using cached artifacts for %s' % targets)
            self._artifact_cache.use_cached_files(
                artifact_key, lambda src, reldest: shutil.copy(
                    src, os.path.join(artifact_root, reldest)))
            yield False  # Caller need not rebuild
        else:
            self.context.log.info('No cached artifacts for %s' % targets)
            yield True  # Caller must rebuild.

            if self.context.options.write_to_artifact_cache:
                if self._artifact_cache.has(artifact_key):
                    # If we get here it means read_from_artifact_cache is false, so we've rebuilt.
                    # We can verify that what we built is identical to the cached version.
                    # If not, there's a dangerous bug, so we want to warn about this loudly.
                    if self.context.options.verify_artifact_cache:
                        pass  # TODO: verification logic
                else:
                    # if the caller provided paths to artifacts but we didn't previously have them in the cache,
                    # we assume that they are now created, and store them in the artifact cache.
                    self.context.log.info('Caching artifacts for %s' %
                                          str(targets))
                    self._artifact_cache.insert(artifact_key, build_artifacts,
                                                artifact_root)
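
The docstrings above spell out the intended composition: wrap the work in invalidated(...) so the build invalidator is updated when the block succeeds, and wrap artifact production in check_artifact_cache(...) so cached artifacts are reused instead of rebuilt. A minimal sketch of a hypothetical subclass wired up this way follows; the class name ExampleCompile, the workdir layout and the _build_one helper are illustrative assumptions rather than part of the API above, and each invalid versioned target is assumed to be usable wherever a VersionedTargetSet is expected.

import os


class ExampleCompile(Task):
    def __init__(self, context, workdir):
        Task.__init__(self, context)
        self._workdir = workdir  # hypothetical output directory for built artifacts

    def product_type(self):
        return 'classes'  # shared product type, as suggested by the docstring above

    def execute(self, targets):
        with self.invalidated(targets, invalidate_dependants=True) as invalidation_result:
            for vt in invalidation_result.invalid_versioned_targets():
                # One artifact path per versioned target (illustrative layout).
                artifact = os.path.join(self._workdir, vt.cache_key.hash)
                with self.check_artifact_cache(vt, [artifact], self._workdir) as build:
                    if build:
                        self._build_one(vt.targets, artifact)

    def _build_one(self, targets, artifact):
        raise NotImplementedError  # the actual build step is out of scope for this sketch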
Example #5
class Task(object):
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    """
      Subclasses can add flags to the pants command line using the given option group.  Flag names
      should be created with mkflag([name]) to ensure flags are properly namespaced amongst other
      tasks.
    """

  def __init__(self, context):
    self.context = context
    self._cache_key_generator = CacheKeyGenerator()
    # TODO: Shared, remote build cache.
    self._artifact_cache = ArtifactCache(context.config.get('tasks', 'artifact_cache'))
    self._build_invalidator_dir = os.path.join(context.config.get('tasks', 'build_invalidator'), self.product_type())

  def product_type(self):
    """
      By default, each task is considered as creating a unique product type.
      Subclasses can override this to specify a shared product type, e.g., 'classes'.

      Tasks with the same product type can invalidate each other's targets, e.g., if a ScalaLibrary
      depends on a JavaLibrary, a change to the JavaLibrary will invalidate the ScalaLibrary because
      they both have the same product type.
    """
    return self.__class__.__name__

  def execute(self, targets):
    """
      Executes this task against the given targets which may be a subset of the current context
      targets.
    """

  def invalidate_for(self):
    """
      Subclasses can override and return an object that should be checked for changes when
      managing target invalidation.  If the pickled form of returned object changes
      between runs all targets will be invalidated.
    """
    return None

  def invalidate_for_files(self):
    """
      Subclasses can override and return a list of full paths to extra, non-source files that should
      be checked for changes when managing target invalidation. This is useful for tracking
      changes to pre-built build tools, e.g., the thrift compiler.
    """
    return []

  @contextmanager
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependants=False):
    """
      Checks targets for invalidation.

      Yields the result to a with block. If no exceptions are thrown by work in the block, the
      cache is updated for the targets, otherwise if a TargetError is thrown by the work in the
      block all targets except those in the TargetError are cached.

      :targets The targets to check for changes.
      :only_buildfiles If True, then just the target's BUILD files are checked for changes.
      :invalidate_dependants If True then any targets depending on changed targets are invalidated.
      :returns: an InvalidationResult reflecting the invalidated targets.
    """
    # Gather extra invalidation data: the invalidate_for() payload plus a digest of each extra file.
    extra_data = []
    extra_data.append(self.invalidate_for())

    for f in self.invalidate_for_files():
      sha = hashlib.sha1()
      with open(f, "rb") as fd:
        sha.update(fd.read())
      extra_data.append(sha.hexdigest())

    cache_manager = CacheManager(self._cache_key_generator, self._build_invalidator_dir,
      targets, invalidate_dependants, extra_data, only_buildfiles)

    # Check for directly changed targets.
    all_versioned_targets = cache_manager.check(targets)
    invalidation_result = InvalidationResult(all_versioned_targets)
    num_invalid_targets = len(invalidation_result.invalid_targets())

    # Do some reporting.
    if num_invalid_targets > 0:
      num_files = reduce(lambda x, y: x + y,
        [vt.cache_key.num_sources for vt in all_versioned_targets if not vt.valid], 0)
      self.context.log.info('Operating on %d files in %d invalidated targets' % (num_files, num_invalid_targets))

    # Yield the result, and then update the cache.
    try:
      if num_invalid_targets > 0:
        self.context.log.debug('Invalidated targets %s' % invalidation_result.invalid_targets())
      yield invalidation_result
      for vt in invalidation_result.invalid_versioned_targets():
        cache_manager.update(vt.cache_key)

    except TargetError as e:
      # TODO: This partial updating isn't used (yet?). Nowhere in the code do we raise a TargetError.
      for vt in invalidation_result.invalid_versioned_targets():
        if len(vt.targets) != 1:
          raise Exception('Logic error: vt should represent a single target')
        if vt.targets[0] not in e.targets:
          cache_manager.update(vt.cache_key)

  @contextmanager
  def check_artifact_cache(self, versioned_targets, build_artifacts, artifact_root):
    """
      See if we have required artifacts in the cache.

      If we do (and reading from the artifact cache is enabled) then we copy the artifacts from the cache.
      If we don't (and writing to the artifact cache is enabled) then we will copy the artifacts into
      the cache when the context is exited.

      Therefore the usage idiom is as follows:

      with self.check_artifact_cache(...) as build:
        if build:
          ... build the necessary artifacts ...

      :versioned_targets a VersionedTargetSet representing a specific version of a set of targets.
      :build_artifacts a list of paths to which the artifacts will be written.
      :artifact_root If not None, the artifact paths will be cached relative to this dir.
      :returns: True if the caller must build the artifacts, False otherwise.
    """
    artifact_key = versioned_targets.cache_key
    targets = versioned_targets.targets
    if self.context.options.read_from_artifact_cache and self._artifact_cache.has(artifact_key):
      self.context.log.info('Using cached artifacts for %s' % targets)
      self._artifact_cache.use_cached_files(artifact_key,
        lambda src, reldest: shutil.copy(src, os.path.join(artifact_root, reldest)))
      yield False  # Caller need not rebuild
    else:
      self.context.log.info('No cached artifacts for %s' % targets)
      yield True  # Caller must rebuild.

      if self.context.options.write_to_artifact_cache:
        if self._artifact_cache.has(artifact_key):
          # If we get here it means read_from_artifact_cache is false, so we've rebuilt.
          # We can verify that what we built is identical to the cached version.
          # If not, there's a dangerous bug, so we want to warn about this loudly.
          if self.context.options.verify_artifact_cache:
            pass  # TODO: verification logic
        else:
          # if the caller provided paths to artifacts but we didn't previously have them in the cache,
          # we assume that they are now created, and store them in the artifact cache.
          self.context.log.info('Caching artifacts for %s' % str(targets))
          self._artifact_cache.insert(artifact_key, build_artifacts, artifact_root)
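
The invalidate_for() and invalidate_for_files() hooks are the extension points the docstrings describe for folding extra state into invalidation. A minimal, hypothetical override is sketched below; the config section name and the tool path are placeholders, not values taken from the code above.

class ExampleThriftGen(Task):
  def invalidate_for(self):
    # Any change to this pickled value invalidates all targets (per the docstring above).
    return self.context.config.get('example-thrift-gen', 'gen_language')

  def invalidate_for_files(self):
    # Re-check targets whenever the pre-built thrift compiler binary changes.
    return ['/usr/local/bin/thrift']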
Example #6
 def __init__(self, context):
   self.context = context
   self._cache_key_generator = CacheKeyGenerator()
   # TODO: Shared, remote build cache.
   self._artifact_cache = ArtifactCache(context.config.get('tasks', 'artifact_cache'))
   self._build_invalidator_dir = os.path.join(context.config.get('tasks', 'build_invalidator'), self.product_type())
Example #7
class PythonChroot(object):
  class BuildFailureException(Exception):
    def __init__(self, msg):
      Exception.__init__(self, msg)

  def __init__(self, target, root_dir, extra_targets=None, builder=None):
    self._config = Config.load()

    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    artifact_cache_root = \
      os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
    self._artifact_cache = ArtifactCache(artifact_cache_root)
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = PythonResolver([self._target] + self._extra_targets)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())
    self._platforms = (Platform.current(),)
    self._pythons = (sys.version[:3],)

    # TODO(wickman) Should this be in the binary builder?
    if isinstance(self._target, PythonBinary):
      self._platforms = self._target._platforms
      self._pythons = self._target._interpreters

  def __del__(self):
    if os.getenv('PANTS_LEAVE_CHROOT') is None:
      safe_rmtree(self.path())

  def debug(self, msg, indent=0):
    if os.getenv('PANTS_VERBOSE') is not None:
      print('%s%s' % (' ' * indent, msg))

  def path(self):
    return self._builder.path()

  def _dump_library(self, library):
    def translate_module(module):
      if module is None:
        module = ''
      return module.replace('.', os.path.sep)

    def copy_to_chroot(base, path, relative_to, add_function):
      src = os.path.join(self._root, base, path)
      dst = os.path.join(translate_module(relative_to), path)
      add_function(src, dst)

    self.debug('  Dumping library: %s [relative module: %s]' % (library, library.module))
    for filename in library.sources:
      copy_to_chroot(library.target_base, filename, library.module, self._builder.add_source)
    for filename in library.resources:
      copy_to_chroot(library.target_base, filename, library.module, self._builder.add_resource)

  def _dump_requirement(self, req, dynamic, repo):
    self.debug('  Dumping requirement: %s%s%s' % (str(req),
      ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
    self._builder.add_requirement(req, dynamic, repo)

  def _dump_distribution(self, dist):
    self.debug('  Dumping distribution: .../%s' % os.path.basename(dist.location))
    self._builder.add_distribution(dist)

  def _dump_bin(self, binary_name, base):
    src = os.path.join(self._root, base, binary_name)
    self.debug('  Dumping binary: %s' % binary_name)
    self._builder.set_executable(src, os.path.basename(src))

  def _dump_thrift_library(self, library):
    self._dump_built_library(library, PythonThriftBuilder(library, self._root))

  def _dump_antlr_library(self, library):
    self._dump_built_library(library, PythonAntlrBuilder(library, self._root))

  def _dump_built_library(self, library, builder):
    # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
    absolute_sources = library.expand_files()
    absolute_sources.sort()
    cache_key = self._key_generator.key_for(library.id, absolute_sources)
    if self._artifact_cache.has(cache_key):
      self.debug('  Generating (cached) %s...' % library)
      self._artifact_cache.use_cached_files(cache_key, self._builder.add_dependency_file)
    else:
      self.debug('  Generating %s...' % library)
      egg_file = builder.build_egg()
      if not egg_file:
        raise PythonChroot.BuildFailureException("Failed to build %s!" % library)
      src_egg_file = egg_file
      dst_egg_file = os.path.join(os.path.dirname(egg_file),
          cache_key.hash + '_' + os.path.basename(egg_file))
      self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
      os.rename(src_egg_file, dst_egg_file)
      self._artifact_cache.insert(cache_key, [dst_egg_file])
      self._builder.add_egg(dst_egg_file)

  def dump(self):
    self.debug('Building PythonBinary %s:' % self._target)

    targets = self._resolver.resolve()

    for lib in targets['libraries']:
      self._dump_library(lib)

    for req in targets['reqs']:
      if not req.should_build():
        self.debug('Skipping %s based upon version filter' % req)
        continue
      self._dump_requirement(req._requirement, req._dynamic, req._repository)

    for dist in ReqResolver.resolve(
        (req._requirement for req in targets['reqs'] if req.should_build()),
        self._config, self._platforms, self._pythons,
        ignore_errors=self._builder.info().ignore_errors):
      self._dump_distribution(dist)

    for thr in targets['thrifts']:
      self._dump_thrift_library(thr)

    for antlr in targets['antlrs']:
      self._dump_antlr_library(antlr)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    for binary in targets['binaries']:
      if len(binary.sources) > 0:
        self._dump_bin(binary.sources[0], binary.target_base)

    return self._builder
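
Read end to end, PythonChroot is driven by constructing it with a target and the repository root and then calling dump(), which returns the underlying PEXBuilder. The snippet below is a sketch under that reading; binary_target and root_dir are placeholders for a parsed PythonBinary target and the source root.

chroot = PythonChroot(binary_target, root_dir)
builder = chroot.dump()   # copies sources, resolved distributions and generated code into the chroot
print('Chroot assembled at %s' % chroot.path())
# Setting PANTS_LEAVE_CHROOT in the environment keeps the directory around for inspection;
# otherwise __del__ removes it with safe_rmtree. PANTS_VERBOSE enables the debug() output.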
Example #8
def test_env(content=TEST_CONTENT):
    with temporary_dir() as d:
        with tempfile.NamedTemporaryFile() as f:
            f.write(content)
            f.flush()
            yield f, ArtifactCache(d)
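
The test_env helper above (presumably wrapped with contextlib.contextmanager in its original module) builds an ArtifactCache rooted in a temporary directory. The other examples imply a simple round-trip: derive a key from the sources, insert artifacts on a cache miss, and restore them later with use_cached_files. A sketch of that flow, assuming os and shutil are imported and using placeholder file names and the made-up target id 'example-target':

def cache_roundtrip_sketch():
    with temporary_dir() as cache_root, temporary_dir() as workdir:
        cache = ArtifactCache(cache_root)

        # Pretend out.txt is an artifact built from hello.py.
        source = os.path.join(workdir, 'hello.py')
        artifact = os.path.join(workdir, 'out.txt')
        for path, data in ((source, b'print("hello")\n'), (artifact, b'built output\n')):
            with open(path, 'wb') as f:
                f.write(data)

        key = CacheKeyGenerator().key_for('example-target', [source])
        if not cache.has(key):
            cache.insert(key, [artifact], workdir)

        # Restore the cached copy, mirroring Task.check_artifact_cache above.
        cache.use_cached_files(
            key, lambda src, reldest: shutil.copy(src, os.path.join(workdir, reldest)))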
Example #9
class PythonChroot(object):
    class BuildFailureException(Exception):
        def __init__(self, msg):
            Exception.__init__(self, msg)

    def __init__(self, target, root_dir, extra_targets=None, builder=None):
        self._config = Config.load()

        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        artifact_cache_root = \
          os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
        self._artifact_cache = ArtifactCache(artifact_cache_root)
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = PythonResolver([self._target] + self._extra_targets)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())
        self._platforms = (Platform.current(), )
        self._pythons = (sys.version[:3], )

        # TODO(wickman) Should this be in the binary builder?
        if isinstance(self._target, PythonBinary):
            self._platforms = self._target._platforms
            self._pythons = self._target._interpreters

    def __del__(self):
        if os.getenv('PANTS_LEAVE_CHROOT') is None:
            safe_rmtree(self.path())

    def debug(self, msg, indent=0):
        if os.getenv('PANTS_VERBOSE') is not None:
            print('%s%s' % (' ' * indent, msg))

    def path(self):
        return self._builder.path()

    def _dump_library(self, library):
        def translate_module(module):
            if module is None:
                module = ''
            return module.replace('.', os.path.sep)

        def copy_to_chroot(base, path, relative_to, add_function):
            src = os.path.join(self._root, base, path)
            dst = os.path.join(translate_module(relative_to), path)
            add_function(src, dst)

        self.debug('  Dumping library: %s [relative module: %s]' %
                   (library, library.module))
        for filename in library.sources:
            copy_to_chroot(library.target_base, filename, library.module,
                           self._builder.add_source)
        for filename in library.resources:
            copy_to_chroot(library.target_base, filename, library.module,
                           self._builder.add_resource)

    def _dump_requirement(self, req, dynamic, repo):
        self.debug('  Dumping requirement: %s%s%s' %
                   (str(req), ' (dynamic)' if dynamic else '',
                    ' (repo: %s)' % repo if repo else ''))
        self._builder.add_requirement(req, dynamic, repo)

    def _dump_distribution(self, dist):
        self.debug('  Dumping distribution: .../%s' %
                   os.path.basename(dist.location))
        self._builder.add_distribution(dist)

    def _dump_bin(self, binary_name, base):
        src = os.path.join(self._root, base, binary_name)
        self.debug('  Dumping binary: %s' % binary_name)
        self._builder.set_executable(src, os.path.basename(src))

    def _dump_thrift_library(self, library):
        self._dump_built_library(library,
                                 PythonThriftBuilder(library, self._root))

    def _dump_antlr_library(self, library):
        self._dump_built_library(library,
                                 PythonAntlrBuilder(library, self._root))

    def _dump_built_library(self, library, builder):
        # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
        absolute_sources = library.expand_files()
        absolute_sources.sort()
        cache_key = self._key_generator.key_for(library.id, absolute_sources)
        if self._artifact_cache.has(cache_key):
            self.debug('  Generating (cached) %s...' % library)
            self._artifact_cache.use_cached_files(
                cache_key, self._builder.add_dependency_file)
        else:
            self.debug('  Generating %s...' % library)
            egg_file = builder.build_egg()
            if not egg_file:
                raise PythonChroot.BuildFailureException(
                    "Failed to build %s!" % library)
            src_egg_file = egg_file
            dst_egg_file = os.path.join(
                os.path.dirname(egg_file),
                cache_key.hash + '_' + os.path.basename(egg_file))
            self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
            os.rename(src_egg_file, dst_egg_file)
            self._artifact_cache.insert(cache_key, [dst_egg_file])
            self._builder.add_egg(dst_egg_file)

    def dump(self):
        self.debug('Building PythonBinary %s:' % self._target)

        targets = self._resolver.resolve()

        for lib in targets['libraries']:
            self._dump_library(lib)

        for req in targets['reqs']:
            if not req.should_build():
                self.debug('Skipping %s based upon version filter' % req)
                continue
            self._dump_requirement(req._requirement, req._dynamic,
                                   req._repository)

        for dist in ReqResolver.resolve(
                (req._requirement for req in targets['reqs'] if req.should_build()),
                self._config,
                self._platforms,
                self._pythons,
                ignore_errors=self._builder.info().ignore_errors):
            self._dump_distribution(dist)

        for thr in targets['thrifts']:
            self._dump_thrift_library(thr)

        for antlr in targets['antlrs']:
            self._dump_antlr_library(antlr)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        for binary in targets['binaries']:
            if len(binary.sources) > 0:
                self._dump_bin(binary.sources[0], binary.target_base)

        return self._builder