Example No. 1
  def _compare_file_checksums(filepath, checksum=None, digest=None):
    """True if filepath exists and, when a checksum is given, its hash matches it."""
    digest = digest or hashlib.sha1()

    if os.path.isfile(filepath) and checksum:
      return hash_file(filepath, digest=digest) == checksum

    return os.path.isfile(filepath)
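Every example on this page calls Pants' hash_file helper (and assumes os and hashlib are already imported). A minimal sketch of such a helper, assuming a hash_file(path, digest=None) signature that returns a hex digest, might look like the following; this is an illustration, not the actual Pants implementation:

  import hashlib

  def hash_file(path, digest=None):
    """Return the hex digest of the file at `path`, using `digest` (sha1 by default)."""
    digest = digest or hashlib.sha1()
    with open(path, 'rb') as fd:
      for chunk in iter(lambda: fd.read(8192), b''):
        digest.update(chunk)
    return digest.hexdigest()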
Example No. 2
    def _get_host_libc_from_host_compiler(self):
        """Locate the host's libc-dev installation using a specified host compiler's search dirs."""
        compiler_exe = self.get_options().host_compiler

        # Implicitly, we are passing in the environment of the executing pants process to
        # `get_compiler_library_dirs()`.
        # These directories are checked to exist!
        library_dirs = self._parse_search_dirs.get_compiler_library_dirs(
            compiler_exe)

        libc_crti_object_file = None
        for libc_dir_candidate in library_dirs:
            maybe_libc_crti = os.path.join(libc_dir_candidate,
                                           self._LIBC_INIT_OBJECT_FILE)
            if os.path.isfile(maybe_libc_crti):
                libc_crti_object_file = maybe_libc_crti
                break

        if not libc_crti_object_file:
            raise self.HostLibcDevResolutionError(
                "Could not locate {fname} in library search dirs {dirs} from compiler: {compiler!r}. "
                "You may need to install a libc dev package for the current system. "
                "For many operating systems, this package is named 'libc-dev' or 'libc6-dev'."
                .format(fname=self._LIBC_INIT_OBJECT_FILE,
                        dirs=library_dirs,
                        compiler=compiler_exe))

        return HostLibcDev(crti_object=libc_crti_object_file,
                           fingerprint=hash_file(libc_crti_object_file))
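HostLibcDev, used here and in Examples 5 and 6, is a simple value type pairing the located crti object file with its hash_file fingerprint. A namedtuple along these lines would satisfy all of the call sites shown (an assumption for illustration, not the actual Pants definition):

    import collections

    # Hypothetical stand-in for the value type constructed above.
    HostLibcDev = collections.namedtuple('HostLibcDev', ['crti_object', 'fingerprint'])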
Example No. 3
  def test_hash_file(self):
    expected_hash = hashlib.md5()
    expected_hash.update(b'jake jones')

    with temporary_file() as fd:
      fd.write(b'jake jones')
      fd.close()  # Flush and close so hash_file sees the full contents on disk.

      self.assertEqual(expected_hash.hexdigest(), hash_file(fd.name, digest=hashlib.md5()))
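temporary_file here is Pants' context manager (pants.util.contextutil.temporary_file). To run this test outside the Pants tree, a minimal stand-in covering only the usage shown above could be:

  import contextlib
  import os
  import tempfile

  @contextlib.contextmanager
  def temporary_file():
    # Yield an open file whose name remains valid after close(), then delete it.
    fd = tempfile.NamedTemporaryFile(delete=False)
    try:
      yield fd
    finally:
      fd.close()  # Safe even if the test already closed it.
      os.unlink(fd.name)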
Example No. 4
  def test_hash_file(self):
    # mox record phase: declare the calls hash_file is expected to make on the digest.
    self.digest.update('jake jones')
    self.digest.hexdigest().AndReturn('1137')
    self.mox.ReplayAll()

    with temporary_file() as fd:
      fd.write('jake jones')
      fd.close()

      self.assertEqual('1137', hash_file(fd.name, digest=self.digest))
Example No. 5
    def host_libc(self):
        """Use the --libc-dir option if provided, otherwise invoke a host compiler to find libc dev."""
        libc_dir_option = self.get_options().libc_dir
        if libc_dir_option:  # Guard: os.path.join(None, ...) would raise a TypeError.
            maybe_libc_crti = os.path.join(libc_dir_option,
                                           self._LIBC_INIT_OBJECT_FILE)
            if os.path.isfile(maybe_libc_crti):
                return HostLibcDev(crti_object=maybe_libc_crti,
                                   fingerprint=hash_file(maybe_libc_crti))

        return self._get_host_libc_from_host_compiler()
Example No. 6
    def _host_libc(self):
        """Use the --libc-dir option if provided, otherwise invoke a host compiler to find libc dev."""
        libc_dir_option = self.get_options().libc_dir
        if libc_dir_option:
            maybe_libc_crti = os.path.join(libc_dir_option,
                                           self._LIBC_INIT_OBJECT_FILE)
            if os.path.isfile(maybe_libc_crti):
                return HostLibcDev(crti_object=maybe_libc_crti,
                                   fingerprint=hash_file(maybe_libc_crti))
            raise self.HostLibcDevResolutionError(
                "Could not locate {} in directory {} provided by the --libc-dir option."
                .format(self._LIBC_INIT_OBJECT_FILE, libc_dir_option))

        return self._get_host_libc_from_host_compiler()
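For context, the two options consulted in Examples 5 and 6 (get_options().libc_dir and get_options().host_compiler) would be registered on the owning subsystem roughly as follows; the class name, types, and defaults here are assumptions for illustration, not the actual Pants registration:

    @classmethod
    def register_options(cls, register):
        super(LibcDev, cls).register_options(register)
        # Hypothetical registrations matching the get_options() reads above.
        register('--libc-dir', type=str, default=None,
                 help='A directory containing a host libc installation.')
        register('--host-compiler', type=str, default='gcc',
                 help='The host compiler to query for libc library search dirs.')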
Example No. 7
File: task.py  Project: igmor/pants
  def create_cache_manager(self, invalidate_dependents, fingerprint_strategy=None):
    """Creates a cache manager that can be used to invalidate targets on behalf of this task.

    Use this if you need to check for invalid targets but can't use the contextmanager created by
    invalidated(), e.g., because you don't want to mark the targets as valid when done.

    invalidate_dependents:   If True then any targets depending on changed targets are invalidated.
    fingerprint_strategy:    A FingerprintStrategy instance, which can do per-task, finer-grained
                             fingerprinting of a given Target.
    """
    extra_data = [self.invalidate_for()]

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    return InvalidationCacheManager(self._cache_key_generator,
                                    self._build_invalidator_dir,
                                    invalidate_dependents,
                                    extra_data,
                                    fingerprint_strategy=fingerprint_strategy)
Example No. 8
    def eslint_supportdir(self, task_workdir):
        """Returns the path where ESLint is bootstrapped.

        :param string task_workdir: The task's working directory.
        :returns: The path where ESLint is bootstrapped and whether or not it is configured.
        :rtype: (string, bool)
        """
        bootstrapped_support_path = os.path.join(task_workdir, 'eslint')

        # TODO(nsaechao): Should only have to check if the "eslint" dir exists in the task_workdir
        # assuming fingerprinting works as intended.

        # If the eslint_setupdir is not provided or is missing required files, then
        # clean up the directory so that Pants can install a pre-defined ESLint version later on.
        # Otherwise, if there are no configuration changes, rely on the cache.
        # If a config change is detected, use the new configuration.
        configured = False
        if self.eslint_setupdir:
            configured = self._binary_util.is_bin_valid(
                self.eslint_setupdir, [
                    BinaryUtil.BinaryFileSpec('package.json'),
                    BinaryUtil.BinaryFileSpec('yarn.lock')
                ])
        if not configured:
            safe_mkdir(bootstrapped_support_path, clean=True)
        else:
            binary_file_specs = [
                BinaryUtil.BinaryFileSpec(
                    f, hash_file(os.path.join(self.eslint_setupdir, f)))
                for f in ['yarn.lock', 'package.json']
            ]
            installed = self._binary_util.is_bin_valid(
                bootstrapped_support_path, binary_file_specs)
            if not installed:
                self._configure_eslinter(bootstrapped_support_path)
        return bootstrapped_support_path, configured
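BinaryUtil.BinaryFileSpec is constructed above both with a filename alone and with a filename plus a hash_file fingerprint, so a two-field namedtuple with a defaulted second field would fit both call sites (a hypothetical shape, not the actual Pants definition):

    import collections

    # Hypothetical spec: a filename and an optional expected fingerprint.
    BinaryFileSpec = collections.namedtuple('BinaryFileSpec', ['filename', 'fingerprint'])
    BinaryFileSpec.__new__.__defaults__ = (None,)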
Example No. 9
    def log_zinc_file(self, analysis_file):
        self.context.log.debug('Calling zinc on: {} ({})'.format(
            analysis_file,
            hash_file(analysis_file).upper()
            if os.path.exists(analysis_file) else 'nonexistent'))
Example No. 10
  def log_zinc_file(self, analysis_file):
    self.context.log.debug('Calling zinc on: %s (%s)' %
                           (analysis_file,
                            hash_file(analysis_file).upper()
                            if os.path.exists(analysis_file)
                            else 'nonexistent'))
Example No. 11
    def log_zinc_file(self, analysis_file):
        self.context.log.debug(
            "Calling zinc on: {} ({})".format(
                analysis_file,
                hash_file(analysis_file).upper() if os.path.exists(analysis_file) else "nonexistent"
            )
        )
Example No. 12
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    extra_data = [self.invalidate_for()]

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    cache_manager = CacheManager(self._cache_key_generator,
                                 self._build_invalidator_dir,
                                 invalidate_dependents,
                                 extra_data,
                                 only_externaldeps=only_buildfiles)

    invalidation_check = cache_manager.check(targets, partition_size_hint)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

    if not silent:
      targets = []
      sources = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        sources.extend(vt.cache_key.sources)
      if targets:
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if sources:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(sources, 'source file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
Example No. 13
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False, locally_changed_targets=None):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:                 The targets to check for changes.
    only_buildfiles:         If True, then only the target's BUILD files are checked for changes,
                             not its sources.
    invalidate_dependents:   If True then any targets depending on changed targets are invalidated.
    partition_size_hint:     Each VersionedTargetSet in the yielded list will represent targets
                             containing roughly this number of source files, if possible. Set to
                             sys.maxint for a single VersionedTargetSet. Set to 0 for one
                             VersionedTargetSet per target. It is up to the caller to do the right
                             thing with whatever partitioning it asks for.
    locally_changed_targets: Targets that we've edited locally. If specified, and there aren't too
                             many of them, we keep these in separate partitions from other targets,
                             as these are more likely to have build errors, and so to be rebuilt over
                             and over, and partitioning them separately is a performance win.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    # TODO(benjy): Compute locally_changed_targets here instead of passing it in? We currently pass
    # it in because JvmCompile already has the source->target mapping for other reasons, and also
    # to selectively enable this feature.
    extra_data = [self.invalidate_for()]

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    cache_manager = InvalidationCacheManager(self._cache_key_generator,
                                             self._build_invalidator_dir,
                                             invalidate_dependents,
                                             extra_data)

    # We separate locally-modified targets from others by coloring them differently.
    # This can be a performance win, because these targets are more likely to be iterated
    # over, and this preserves "chunk stability" for them.
    colors = {}

    # But we only do so if there aren't too many, or this optimization will backfire.
    locally_changed_target_limit = 10

    if locally_changed_targets and len(locally_changed_targets) < locally_changed_target_limit:
      for t in targets:
        if t in locally_changed_targets:
          colors[t] = 'locally_changed'
        else:
          colors[t] = 'not_locally_changed'
    invalidation_check = cache_manager.check(targets, partition_size_hint, colors)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint, colors)

    if not silent:
      targets = []
      payloads = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        payloads.extend(vt.cache_key.payloads)
      if targets:
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if payloads:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(payloads, 'payload file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    for vt in invalidation_check.invalid_vts:
      vt.update()  # In case the caller doesn't update.
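Both invalidated() variants above fold the hash of every path returned by invalidate_for_files() into the cache key, so a task can tie its validity to files outside its targets' sources. A task subclass might override it along these lines (a hypothetical example, not from the Pants tree):

  class CheckstyleTask(Task):  # Hypothetical subclass.
    def invalidate_for_files(self):
      # Changing this config file changes the extra_data hashes computed above,
      # invalidating all of this task's targets on the next run.
      return ['build-support/checkstyle/checkstyle.xml']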