Example No. 1
 def _dump_built_library(self, library, builder):
   # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
   absolute_sources = library.expand_files()
   absolute_sources.sort()
   cache_key = self._key_generator.key_for(library.id, absolute_sources)
   cache_dir = os.path.join(self._egg_cache_root, cache_key.hash)
   if os.path.exists(cache_dir):
     self.debug('  Generating (cached) %s...' % library)
     # We have no idea what the egg path is, so we simply walk the directory.
     for dir_name, _, filenames in os.walk(cache_dir):
       for filename in filenames:
         self._builder.add_egg(os.path.join(dir_name, filename))
   else:
     self.debug('  Generating %s...' % library)
     egg_file = builder.build_egg()
     if not egg_file:
       raise PythonChroot.BuildFailureException("Failed to build %s!" % library)
     src_egg_file = egg_file
     dst_egg_file = os.path.join(os.path.dirname(egg_file),
         cache_key.hash + '_' + os.path.basename(egg_file))
     self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
     os.rename(src_egg_file, dst_egg_file)
     cache_dir = os.path.join(self._egg_cache_root, cache_key.hash)
     cached_egg_file = os.path.join(cache_dir, os.path.relpath(dst_egg_file, self._root))
     try:
       safe_mkdir_for(cached_egg_file)
       shutil.copy(dst_egg_file, cached_egg_file)
     except:
       safe_rmtree(cache_dir)
       raise
     self._builder.add_egg(dst_egg_file)
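A minimal, self-contained sketch of the caching idiom above, with a plain SHA-1 of the sorted source list standing in for the pants key generator and os/shutil calls in place of the safe_* helpers (build_fn and add_fn are hypothetical callables):

import hashlib
import os
import shutil

def cached_build(cache_root, sources, build_fn, add_fn):
    # Key the cache entry on the sorted source list, as the example does.
    key = hashlib.sha1('\n'.join(sorted(sources)).encode('utf-8')).hexdigest()
    cache_dir = os.path.join(cache_root, key)
    if os.path.exists(cache_dir):
        # Cache hit: artifact names are unknown, so walk the directory.
        for dir_name, _, filenames in os.walk(cache_dir):
            for filename in filenames:
                add_fn(os.path.join(dir_name, filename))
    else:
        # Cache miss: build, then copy into the cache, discarding the
        # partial entry if the copy fails.
        artifact = build_fn()
        try:
            if not os.path.isdir(cache_dir):
                os.makedirs(cache_dir)
            shutil.copy(artifact, os.path.join(cache_dir, os.path.basename(artifact)))
        except Exception:
            shutil.rmtree(cache_dir, ignore_errors=True)
            raise
        add_fn(artifact)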
Example No. 2
 def erase_logs(self, task_id):
   for fn in self.get_logs(task_id, with_size=False):
     safe_delete(fn)
   state = self.state(task_id)
   if state and state.header:
     safe_rmtree(TaskPath(root=self._root, task_id=task_id, log_dir=state.header.log_dir)
                 .getpath('process_logbase'))
Example No. 3
 def _dump_built_library(self, library, builder):
     # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
     absolute_sources = library.expand_files()
     absolute_sources.sort()
     cache_key = self._key_generator.key_for(library.id, absolute_sources)
     cache_dir = os.path.join(self._egg_cache_root, cache_key.hash)
     if os.path.exists(cache_dir):
         self.debug('  Generating (cached) %s...' % library)
         # We have no idea what the egg path is, so we simply walk the directory.
         for dir_name, _, filenames in os.walk(cache_dir):
             for filename in filenames:
                 self._builder.add_egg(os.path.join(dir_name, filename))
     else:
         self.debug('  Generating %s...' % library)
         egg_file = builder.build_egg()
         if not egg_file:
             raise PythonChroot.BuildFailureException(
                 "Failed to build %s!" % library)
         src_egg_file = egg_file
         dst_egg_file = os.path.join(
             os.path.dirname(egg_file),
             cache_key.hash + '_' + os.path.basename(egg_file))
         self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
         os.rename(src_egg_file, dst_egg_file)
         cache_dir = os.path.join(self._egg_cache_root, cache_key.hash)
         cached_egg_file = os.path.join(
             cache_dir, os.path.relpath(dst_egg_file, self._root))
         try:
             safe_mkdir_for(cached_egg_file)
             shutil.copy(dst_egg_file, cached_egg_file)
         except:
             safe_rmtree(cache_dir)
             raise
         self._builder.add_egg(dst_egg_file)
Example No. 4
    def update(self, cache_key, build_artifacts=None, artifact_root=None):
        """Cache the output of a build.

    If the cache area contains an existing object with the same (path, source_sha) its path will
    be returned. If no such object exists, builder will be called with a path inside the staging
    area and should create a new object.

    :param cache_key: A CacheKey object (typically returned by BuildCache.key_for()).
    :param build_artifacts: List of paths to generated artifacts under artifact_root.
    :param artifact_root: Optional root directory under which artifacts are stored.
    """
        safe_rmtree(cache_key.filename)
        for artifact in build_artifacts or ():
            rel_path = os.path.basename(artifact) \
                if artifact_root is None \
                else os.path.relpath(artifact, artifact_root)
            assert not rel_path.startswith('..'), \
              'Weird: artifact=%s, rel_path=%s' % (artifact, rel_path)
            artifact_dest = os.path.join(cache_key.filename, rel_path)
            dir_name = os.path.dirname(artifact_dest)
            if not os.path.exists(dir_name):
                os.makedirs(dir_name)
            if os.path.isdir(artifact):
                shutil.copytree(artifact, artifact_dest)
            else:
                shutil.copy(artifact, artifact_dest)
        self._write_sha(cache_key)
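The rel_path computation above is the part worth isolating: with no artifact_root, only the file name is kept; with one, the layout below the root is preserved, and the assertion rejects artifacts that escape it. A standalone sketch (names are illustrative):

import os

def dest_path_for(artifact, cache_dir, artifact_root=None):
    # Without a root, keep only the file name; with one, preserve the
    # directory layout below the root.
    if artifact_root is None:
        rel_path = os.path.basename(artifact)
    else:
        rel_path = os.path.relpath(artifact, artifact_root)
    # A path outside the root relativizes to something starting with '..'.
    assert not rel_path.startswith('..'), \
        'artifact %s is not under %s' % (artifact, artifact_root)
    return os.path.join(cache_dir, rel_path)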
Example No. 5
 def erase_logs(self):
     for fn in self.get_logs(with_size=False):
         safe_delete(fn)
     if self._state and self._state.header:
         path = self._pathspec.given(
             log_dir=self._state.header.log_dir).getpath('process_logbase')
         safe_rmtree(path)
Example No. 6
 def translate(self, link):
     """From a link, translate a distribution."""
     if not isinstance(link, SourceLink):
         return None
     if not Platform.compatible(Platform.current(), self._platform):
         return None
     if not Platform.version_compatible(Platform.python(), self._python):
         return None
     unpack_path, installer = None, None
     try:
         unpack_path = link.fetch(conn_timeout=self._conn_timeout)
         with TRACER.timed('Installing %s' % link.name):
             installer = Installer(unpack_path,
                                   strict=(link.name != 'distribute'))
         with TRACER.timed('Distilling %s' % link.name):
             try:
                 dist = installer.distribution()
             except Installer.InstallFailure:
                 return None
             return dist_from_egg(
                 Distiller(dist).distill(into=self._install_cache))
     finally:
         if installer:
             installer.cleanup()
         if unpack_path:
             safe_rmtree(unpack_path)
Example No. 7
  def insert(self, cache_key, build_artifacts, artifact_root=None):
    """Cache the output of a build.

    If there is an existing set of artifacts for this key they are deleted.

    TODO: Check that they're equal? If they aren't it's a grave bug, since the key is supposed
    to be a fingerprint of all possible inputs to the build.

    :param cache_key: A CacheKey object.
    :param build_artifacts: List of paths to generated artifacts under artifact_root.
    :param artifact_root: Optional root directory under which artifacts are stored.
    """
    cache_dir = self._cache_dir_for_key(cache_key)
    try:
      safe_rmtree(cache_dir)
      for artifact in build_artifacts or ():
        rel_path = os.path.basename(artifact) \
        if artifact_root is None \
        else os.path.relpath(artifact, artifact_root)
        assert not rel_path.startswith('..'), \
          'Weird: artifact=%s, rel_path=%s' % (artifact, rel_path)
        artifact_dest = os.path.join(cache_dir, rel_path)
        dir_name = os.path.dirname(artifact_dest)
        safe_mkdir(dir_name)
        if os.path.isdir(artifact):
          shutil.copytree(artifact, artifact_dest)
        else:
          shutil.copy(artifact, artifact_dest)
    except Exception as e:
      try:
        safe_rmtree(cache_dir)
      except Exception as e:
        print('IMPORTANT: failed to delete %s on error. Your artifact cache may be corrupted. '
              'Please delete manually.' % cache_dir)
      raise e
Example No. 8
  def _merge_classes_dir(self, state):
    """Merge the classes dirs from the underlying artifacts into a single dir.

    May symlink instead of copying, when it's OK to do so.

    Postcondition: symlinks are of leaf packages only.
    """
    if len(self.underlying_artifacts) <= 1:
      return
    self.log.debug('Merging classes dirs into %s' % self.classes_dir)
    symlinkable_packages = self._symlinkable_packages(state)
    for artifact in self.underlying_artifacts:
      classnames_by_package = defaultdict(list)
      for cls in state.classes_by_target.get(artifact.targets[0], []):
        classnames_by_package[os.path.dirname(cls)].append(os.path.basename(cls))

      for package, classnames in classnames_by_package.items():
        artifact_package_dir = os.path.join(artifact.classes_dir, package)
        merged_package_dir = os.path.join(self.classes_dir, package)

        if package in symlinkable_packages:
          if os.path.islink(merged_package_dir):
            assert os.readlink(merged_package_dir) == artifact_package_dir
          elif os.path.exists(merged_package_dir):
            safe_rmtree(merged_package_dir)
            os.symlink(artifact_package_dir, merged_package_dir)
          else:
            safe_mkdir(os.path.dirname(merged_package_dir))
            os.symlink(artifact_package_dir, merged_package_dir)
        else:
          safe_mkdir(merged_package_dir)
          for classname in classnames:
            src = os.path.join(artifact_package_dir, classname)
            dst = os.path.join(merged_package_dir, classname)
            self._maybe_hardlink(src, dst)
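The _maybe_hardlink helper is not shown in these snippets; a plausible sketch of such a helper (an assumption, not this project's actual implementation) is to hard-link when the paths share a filesystem and fall back to copying otherwise:

import errno
import os
import shutil

def maybe_hardlink(src, dst):
    # Hard-link when possible; fall back to a copy across filesystems.
    if os.path.exists(dst):
        return
    try:
        os.link(src, dst)
    except OSError as e:
        if e.errno == errno.EXDEV:  # cross-device link not permitted
            shutil.copy(src, dst)
        else:
            raise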
Example No. 9
  def update(self, cache_key, build_artifacts=None, artifact_root=None):
    """Cache the output of a build.

    If the cache area contains an existing object with the same (path, source_sha) its path will
    be returned. If no such object exists, builder will be called with a path inside the staging
    area and should create a new object.

    :param cache_key: A CacheKey object (typically returned by BuildCache.key_for()).
    :param build_artifacts: List of paths to generated artifacts under artifact_root.
    :param artifact_root: Optional root directory under which artifacts are stored.
    """
    safe_rmtree(cache_key.filename)
    for artifact in build_artifacts or ():
      rel_path = os.path.basename(artifact) \
          if artifact_root is None \
          else os.path.relpath(artifact, artifact_root)
      assert not rel_path.startswith('..'), \
        'Weird: artifact=%s, rel_path=%s' % (artifact, rel_path)
      artifact_dest = os.path.join(cache_key.filename, rel_path)
      dir_name = os.path.dirname(artifact_dest)
      if not os.path.exists(dir_name):
        os.makedirs(dir_name)
      if os.path.isdir(artifact):
        shutil.copytree(artifact, artifact_dest)
      else:
        shutil.copy(artifact, artifact_dest)
    self._write_sha(cache_key)
Example No. 10
  def _merge_artifact(self, versioned_target_set):
    """Merges artifacts representing the individual targets in a VersionedTargetSet into one artifact for that set.
    Creates an output classes dir, depfile and analysis file for the VersionedTargetSet.
    Note that the merged artifact may be incomplete (e.g., if we have no previous artifacts for some of the
    individual targets). That's OK: We run this right before we invoke zinc, which will fill in what's missing.
    This method is not required for correctness, only for efficiency: it can prevent zinc from doing superfluous work.

    NOTE: This method is reentrant.
    """
    if len(versioned_target_set.targets) <= 1:
      return  # Nothing to do.

    with temporary_dir() as tmpdir:
      dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths(versioned_target_set.targets)
      safe_rmtree(dst_classes_dir)
      safe_mkdir(dst_classes_dir)
      src_analysis_files = []

      # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
      dst_deps = Dependencies(dst_classes_dir)

      for target in versioned_target_set.targets:
        src_classes_dir, src_depfile, src_analysis_file = self._output_paths([target])
        if os.path.exists(src_depfile):
          src_deps = Dependencies(src_classes_dir)
          src_deps.load(src_depfile)
          dst_deps.merge(src_deps)

          classes_by_source = src_deps.findclasses([target]).get(target, {})
          for source, classes in classes_by_source.items():
            for cls in classes:
              src = os.path.join(src_classes_dir, cls)
              dst = os.path.join(dst_classes_dir, cls)
              # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
              # it's missing and rebuild it.
              # dst may already exist if we have overlapping targets. It's not a good idea
              # to have those, but until we enforce it, we must allow it here.
              if os.path.exists(src) and not os.path.exists(dst):
                # Copy the class file.
                safe_mkdir(os.path.dirname(dst))
                os.link(src, dst)

          # Rebase a copy of the per-target analysis files to reflect the merged classes dir.
          if os.path.exists(src_analysis_file):
            src_analysis_file_tmp = \
            os.path.join(tmpdir, os.path.relpath(src_analysis_file, self._analysis_files_base))
            shutil.copyfile(src_analysis_file, src_analysis_file_tmp)
            src_analysis_files.append(src_analysis_file_tmp)
            if self._zinc_utils.run_zinc_rebase(src_analysis_file_tmp, [(src_classes_dir, dst_classes_dir)]):
              self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. '\
                                    'Target may require a full rebuild.' %\
                                    src_analysis_file_tmp)

      dst_deps.save(dst_depfile)

      if self._zinc_utils.run_zinc_merge(src_analysis_files, dst_analysis_file):
        self.context.log.warn('zinc failed to merge analysis files %s to %s. '\
                              'Target may require a full rebuild.' %\
                             (':'.join(src_analysis_files), dst_analysis_file))
Example No. 11
def _cautious_rmtree(root):
    real_buildroot = os.path.realpath(os.path.abspath(get_buildroot()))
    real_root = os.path.realpath(os.path.abspath(root))
    if not real_root.startswith(real_buildroot):
        raise TaskError(
            'DANGER: Attempting to delete %s, which is not under the build root!'
            % real_root)
    safe_rmtree(real_root)
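One caveat with the prefix check above, noted rather than changed: a sibling directory whose name merely starts with the build root string (e.g. /repo-backup next to /repo) would also pass. A stricter containment check under the same realpath normalization might look like:

import os

def is_under(root, path):
    real_root = os.path.realpath(os.path.abspath(root))
    real_path = os.path.realpath(os.path.abspath(path))
    # Compare against the root plus a trailing separator so that
    # '/repo-backup' does not count as being under '/repo'.
    return real_path == real_root or \
        real_path.startswith(real_root.rstrip(os.sep) + os.sep)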
Example No. 12
  def split_artifact(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    buildroot = get_buildroot()
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)
    analysis_splits = []  # List of triples of (list of sources, destination output dir, destination analysis cache).
    # for dependency analysis, we need to record the cache files that we create in the split

    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)

      sources = []
      dst_deps = Dependencies(dst_output_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        source_abspath = os.path.join(buildroot, target.target_base, source)
        sources.append(source_abspath)
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_output_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_output_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
      self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))
    # Use zinc to split the analysis files.
    if os.path.exists(src_analysis_cache):
      analysis_args = []
      analysis_args.extend(self._zinc_jar_args)
      analysis_args.extend([
        '-log-level', self.context.options.log_level or 'info',
        '-analysis',
        '-mirror-analysis'
        ])
      split_args = analysis_args + [
        '-cache', src_analysis_cache,
        '-split', ','.join(['{%s}:%s' % (':'.join(x[0]), x[2]) for x in analysis_splits]),
        ]
      if self.runjava(self._main, classpath=self._zinc_classpath, args=split_args, jvmargs=self._jvm_args):
        raise TaskError, 'zinc failed to split analysis files %s from %s' %\
                         (':'.join([x[2] for x in analysis_splits]), src_analysis_cache)

      # Now rebase the newly created analysis files.
      for split in analysis_splits:
        dst_analysis_cache = split[2]
        if os.path.exists(dst_analysis_cache):
          rebase_args = analysis_args + [
            '-cache', dst_analysis_cache,
            '-rebase', '%s:%s' % (src_output_dir, split[1]),
            ]
          if self.runjava(self._main, classpath=self._zinc_classpath, args=rebase_args, jvmargs=self._jvm_args):
            raise TaskError, 'In split_artifact: zinc failed to rebase analysis file %s' % dst_analysis_cache
Example No. 13
  def _split_classes_dir(self, state, diff):
    """Split the merged classes dir into one dir per underlying artifact."""
    if len(self.underlying_artifacts) <= 1:
      return

    def map_classes_by_package(classes):
      # E.g., com/foo/bar/Bar.scala, com/foo/bar/Baz.scala to com/foo/bar -> [Bar.scala, Baz.scala].
      ret = defaultdict(list)
      for cls in classes:
        ret[os.path.dirname(cls)].append(os.path.basename(cls))
      return ret

    self.log.debug('Splitting classes dir %s' % self.classes_dir)
    if diff:
      new_or_changed_classnames_by_package = map_classes_by_package(diff.new_or_changed_classes)
      deleted_classnames_by_package = map_classes_by_package(diff.deleted_classes)
    else:
      new_or_changed_classnames_by_package = None
      deleted_classnames_by_package = None

    symlinkable_packages = self._symlinkable_packages(state)
    for artifact in self.underlying_artifacts:
      classnames_by_package = \
        map_classes_by_package(state.classes_by_target.get(artifact.targets[0], []))

      for package, classnames in classnames_by_package.items():
        if package == "":
          raise TaskError("Found class files %s with empty package" % classnames)
        artifact_package_dir = os.path.join(artifact.classes_dir, package)
        merged_package_dir = os.path.join(self.classes_dir, package)

        if package in symlinkable_packages:
          if os.path.islink(merged_package_dir):
            current_link = os.readlink(merged_package_dir)
            if current_link != artifact_package_dir:
              # The code moved to a different target.
              os.unlink(merged_package_dir)
              safe_rmtree(artifact_package_dir)
              shutil.move(current_link, artifact_package_dir)
              os.symlink(artifact_package_dir, merged_package_dir)
          else:
            safe_rmtree(artifact_package_dir)
            shutil.move(merged_package_dir, artifact_package_dir)
            os.symlink(artifact_package_dir, merged_package_dir)
        else:
          safe_mkdir(artifact_package_dir)
          new_or_changed_classnames = \
            set(new_or_changed_classnames_by_package.get(package, [])) if diff else None
          for classname in classnames:
            if not diff or classname in new_or_changed_classnames:
              src = os.path.join(merged_package_dir, classname)
              dst = os.path.join(artifact_package_dir, classname)
              self._maybe_hardlink(src, dst)
          if diff:
            for classname in deleted_classnames_by_package.get(package, []):
              path = os.path.join(artifact_package_dir, classname)
              if os.path.exists(path):
                os.unlink(path)
Example No. 14
def initial_reporting(config, run_tracker):
    """Sets up the initial reporting configuration.

  Will be changed after we parse cmd-line flags.
  """
    reports_dir = config.get('reporting',
                             'reports_dir',
                             default=os.path.join(
                                 config.getdefault('pants_workdir'),
                                 'reports'))
    link_to_latest = os.path.join(reports_dir, 'latest')
    if os.path.exists(link_to_latest):
        os.unlink(link_to_latest)

    run_id = run_tracker.run_info.get_info('id')
    if run_id is None:
        raise ReportingError('No run_id set')
    run_dir = os.path.join(reports_dir, run_id)
    safe_rmtree(run_dir)

    html_dir = os.path.join(run_dir, 'html')
    safe_mkdir(html_dir)
    os.symlink(run_dir, link_to_latest)

    report = Report()

    # Capture initial console reporting into a buffer. We'll do something with it once
    # we know what the cmd-line flag settings are.
    outfile = StringIO()
    capturing_reporter_settings = PlainTextReporter.Settings(
        outfile=outfile,
        log_level=Report.INFO,
        color=False,
        indent=True,
        timing=False,
        cache_stats=False)
    capturing_reporter = PlainTextReporter(run_tracker,
                                           capturing_reporter_settings)
    report.add_reporter('capturing', capturing_reporter)

    # Set up HTML reporting. We always want that.
    template_dir = config.get('reporting', 'reports_template_dir')
    html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO,
                                                   html_dir=html_dir,
                                                   template_dir=template_dir)
    html_reporter = HtmlReporter(run_tracker, html_reporter_settings)
    report.add_reporter('html', html_reporter)

    # Add some useful RunInfo.
    run_tracker.run_info.add_info('default_report',
                                  html_reporter.report_path())
    port = ReportingServerManager.get_current_server_port()
    if port:
        run_tracker.run_info.add_info(
            'report_url', 'http://localhost:%d/run/%s' % (port, run_id))

    return report
Example No. 15
    def invalidate(self, cache_key):
        """Invalidates this cache key and any cached files associated with it.

    :param cache_key: A CacheKey object (as returned by BuildCache.key_for()).
    """
        safe_rmtree(cache_key.filename)
        sha_file = self._sha_file(cache_key)
        if os.path.exists(sha_file):
            os.unlink(sha_file)
Example No. 16
  def _split_artifact(self, deps, versioned_target_set):
    """Splits an artifact representing several targets into target-by-target artifacts.
    Creates an output classes dir, a depfile and an analysis file for each target.
    Note that it's not OK to create incomplete artifacts here: this is run *after* a zinc invocation,
    and the expectation is that the result is complete.

    NOTE: This method is reentrant.
    """
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_classes_dir, _, src_analysis_file = self._output_paths(versioned_target_set.targets)

    # Specifies that the list of sources defines a split to the classes dir and analysis file.
    SplitInfo = namedtuple('SplitInfo', ['sources', 'dst_classes_dir', 'dst_analysis_file'])

    analysis_splits = []  # List of SplitInfos.
    portable_analysis_splits = []  # The same, for the portable version of the analysis cache.

    # Prepare the split arguments.
    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths([target])
      safe_rmtree(dst_classes_dir)
      safe_mkdir(dst_classes_dir)

      sources = []
      dst_deps = Dependencies(dst_classes_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        sources.append(os.path.join(target.target_base, source))
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_classes_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_classes_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append(SplitInfo(sources, dst_classes_dir, dst_analysis_file))
      portable_analysis_splits.append(SplitInfo(sources, dst_classes_dir, _portable(dst_analysis_file)))

    def do_split(src_analysis_file, splits):
      if os.path.exists(src_analysis_file):
        if self._zinc_utils.run_zinc_split(src_analysis_file, [(x.sources, x.dst_analysis_file) for x in splits]):
          raise TaskError, 'zinc failed to split analysis files %s from %s' %\
                           (':'.join([x.dst_analysis_file for x in splits]), src_analysis_file)
        for split in splits:
          if os.path.exists(split.dst_analysis_file):
            if self._zinc_utils.run_zinc_rebase(split.dst_analysis_file,
                                                [(src_classes_dir, split.dst_classes_dir)]):
              raise TaskError, \
                'In split_artifact: zinc failed to rebase analysis file %s' % split.dst_analysis_file

    # Now rebase the newly created analysis file(s) to reflect the split classes dirs.
    do_split(src_analysis_file, analysis_splits)
    do_split(_portable(src_analysis_file), portable_analysis_splits)
Example No. 17
 def teardown_class(cls):
     if 'THERMOS_DEBUG' not in os.environ:
         safe_rmtree(cls.LOG_DIR)
         if cls.PEX_PATH:
             safe_rmtree(os.path.dirname(cls.PEX_PATH))
     else:
         print('Saving executor logs in %s' % cls.LOG_DIR)
         if cls.PEX_PATH:
             print('Saved thermos executor at %s' % cls.PEX_PATH)
Example No. 18
 def erase_data(self, task_id):
   # TODO(wickman)
   # This could be potentially dangerous if somebody naively runs their sandboxes in e.g.
   # $HOME or / or similar.  Perhaps put a guard somewhere?
   for fn in self.get_data(task_id, with_size=False):
     os.unlink(fn)
   state = self.state(task_id)
   if state and state.header and state.header.sandbox:
     safe_rmtree(state.header.sandbox)
Example No. 19
 def erase_data(self, task_id):
     # TODO(wickman)
     # This could be potentially dangerous if somebody naively runs their sandboxes in e.g.
     # $HOME or / or similar.  Perhaps put a guard somewhere?
     for fn in self.get_data(task_id, with_size=False):
         os.unlink(fn)
     state = self.state(task_id)
     if state and state.header and state.header.sandbox:
         safe_rmtree(state.header.sandbox)
Example No. 20
 def teardown_class(cls):
   if 'THERMOS_DEBUG' not in os.environ:
     safe_rmtree(cls.LOG_DIR)
     if cls.PEX_PATH:
       safe_rmtree(os.path.dirname(cls.PEX_PATH))
   else:
     print('Saving executor logs in %s' % cls.LOG_DIR)
     if cls.PEX_PATH:
       print('Saved thermos executor at %s' % cls.PEX_PATH)
Example No. 21
  def invalidate(self, cache_key):
    """Invalidates this cache key and any cached files associated with it.

    :param cache_key: A CacheKey object (as returned by BuildCache.key_for()).
    """
    safe_rmtree(cache_key.filename)
    sha_file = self._sha_file(cache_key)
    if os.path.exists(sha_file):
      os.unlink(sha_file)
Example No. 22
 def test_split_existing(self):
   td = tempfile.mkdtemp()
   try:
     assert Nested._split_existing(td) == (td, '.')
     assert Nested._split_existing(td + os.sep) == (td, '.')
     assert Nested._split_existing(os.path.join(td, 'a', 'b', 'c')) == (
       td, os.path.join('a', 'b', 'c'))
     assert Nested._split_existing(os.path.join(td, 'a', '..', 'c')) == (td, 'c')
   finally:
     safe_rmtree(td)
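The test above uses the usual mkdtemp-plus-try/finally shape so the scratch directory is removed even when an assertion fails. The same pattern with only the standard library, shutil.rmtree standing in for safe_rmtree:

import os
import shutil
import tempfile

def test_with_scratch_dir():
    td = tempfile.mkdtemp()
    try:
        # ... exercise code that needs a real, existing directory ...
        assert os.path.isdir(td)
    finally:
        # Clean up even if the assertions above fail.
        shutil.rmtree(td, ignore_errors=True)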
Example No. 23
 def erase_logs(self, task_id):
     for fn in self.get_logs(task_id, with_size=False):
         safe_delete(fn)
     state = self.state(task_id)
     if state and state.header:
         safe_rmtree(
             TaskPath(
                 root=self._root,
                 task_id=task_id,
                 log_dir=state.header.log_dir).getpath('process_logbase'))
Example No. 24
  def _compile(self, versioned_target_set, classpath, upstream_analysis_files):
    """Actually compile some targets.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in versioned_target_set are up-to-date, as if each
                   were compiled individually.
    """
    # Note: We actually compile all the targets in the set in a single zinc call, because
    # compiler invocation overhead is high, but this fact is not exposed outside this method.
    classes_dir, depfile, analysis_file = self._output_paths(versioned_target_set.targets)
    safe_mkdir(classes_dir)

    # Get anything we have from previous builds.
    self._merge_artifact(versioned_target_set)

    # Compute the sources we need to compile.
    sources_by_target = ScalaCompile._calculate_sources(versioned_target_set.targets)

    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s' %
                              '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        # Invoke the compiler.
        self.context.log.info('Compiling targets %s' % versioned_target_set.targets)
        if self._zinc_utils.compile(classpath, sources, classes_dir, analysis_file,
                                    upstream_analysis_files, depfile):
          raise TaskError('Compile failed.')

        # Read in the deps we just created.
        self.context.log.debug('Reading dependencies from ' + depfile)
        deps = Dependencies(classes_dir)
        deps.load(depfile)

        # Split the artifact into per-target artifacts.
        self._split_artifact(deps, versioned_target_set)

        # Write to artifact cache, if needed.
        for vt in versioned_target_set.versioned_targets:
          vt_classes_dir, vt_depfile, vt_analysis_file = self._output_paths(vt.targets)
          vt_portable_analysis_file = _portable(vt_analysis_file)
          if self._artifact_cache and self.context.options.write_to_artifact_cache:
            # Relativize the analysis.
            # TODO: Relativize before splitting? This will require changes to Zinc, which currently
            # eliminates paths it doesn't recognize (including our placeholders) when splitting.
            if os.path.exists(vt_analysis_file) and \
                self._zinc_utils.relativize_analysis_file(vt_analysis_file, vt_portable_analysis_file):
              raise TaskError('Zinc failed to relativize analysis file: %s' % vt_analysis_file)
            # Write the per-target artifacts to the cache.
            artifacts = [vt_classes_dir, vt_depfile, vt_portable_analysis_file]
            self.update_artifact_cache(vt, artifacts)
          else:
            safe_rmtree(vt_portable_analysis_file)  # Don't leave cruft lying around.
Example No. 25
 def teardown_class(cls):
   if 'THERMOS_DEBUG' not in os.environ:
     safe_rmtree(cls.LOG_DIR)
     thermos_path = thermos_runner_path(build=False)
     if thermos_path:
       safe_rmtree(os.path.dirname(thermos_path))
   else:
     print('Saving executor logs in %s' % cls.LOG_DIR)
     thermos_path = thermos_runner_path(build=False)
     if thermos_path:
       print('Saved thermos executor at %s' % thermos_path)
Example No. 26
 def teardown_class(cls):
     if 'THERMOS_DEBUG' not in os.environ:
         safe_rmtree(cls.LOG_DIR)
         thermos_path = thermos_runner_path(build=False)
         if thermos_path:
             safe_rmtree(os.path.dirname(thermos_path))
     else:
         print('Saving executor logs in %s' % cls.LOG_DIR)
         thermos_path = thermos_runner_path(build=False)
         if thermos_path:
             print('Saved thermos executor at %s' % thermos_path)
Example No. 27
 def try_insert(self, cache_key, build_artifacts):
   cache_dir = self._cache_dir_for_key(cache_key)
   safe_rmtree(cache_dir)
   for artifact in build_artifacts or ():
     rel_path = os.path.relpath(artifact, self.artifact_root)
     assert not rel_path.startswith('..'), \
       'Artifact %s is not under artifact root %s' % (artifact, self.artifact_root)
     artifact_dest = os.path.join(cache_dir, rel_path)
     safe_mkdir(os.path.dirname(artifact_dest))
     if os.path.isdir(artifact):
       shutil.copytree(artifact, artifact_dest)
     else:
       shutil.copy(artifact, artifact_dest)
Example No. 28
 def try_insert(self, cache_key, build_artifacts):
     cache_dir = self._cache_dir_for_key(cache_key)
     safe_rmtree(cache_dir)
     for artifact in build_artifacts or ():
         rel_path = os.path.relpath(artifact, self.artifact_root)
         assert not rel_path.startswith('..'), \
           'Artifact %s is not under artifact root %s' % (artifact, self.artifact_root)
         artifact_dest = os.path.join(cache_dir, rel_path)
         safe_mkdir(os.path.dirname(artifact_dest))
         if os.path.isdir(artifact):
             shutil.copytree(artifact, artifact_dest)
         else:
             shutil.copy(artifact, artifact_dest)
Example No. 29
  def merge_artifact(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    with temporary_dir() as tmpdir:
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths(versioned_target_set.targets)
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)
      src_analysis_caches = []

      # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
      dst_deps = Dependencies(dst_output_dir)

      for target in versioned_target_set.targets:
        src_output_dir, src_depfile, src_analysis_cache = self.create_output_paths([target])
        if os.path.exists(src_depfile):
          src_deps = Dependencies(src_output_dir)
          src_deps.load(src_depfile)
          dst_deps.merge(src_deps)

          classes_by_source = src_deps.findclasses([target]).get(target, {})
          for source, classes in classes_by_source.items():
            for cls in classes:
              src = os.path.join(src_output_dir, cls)
              dst = os.path.join(dst_output_dir, cls)
              # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
              # it's missing and rebuild it.
              # dst may already exist if we have overlapping targets. It's not a good idea
              # to have those, but until we enforce it, we must allow it here.
              if os.path.exists(src) and not os.path.exists(dst):
                # Copy the class file.
                safe_mkdir(os.path.dirname(dst))
                os.link(src, dst)

          # Rebase a copy of the per-target analysis files prior to merging.
          if os.path.exists(src_analysis_cache):
            src_analysis_cache_tmp = \
              os.path.join(tmpdir, os.path.relpath(src_analysis_cache, self._analysis_cache_dir))
            shutil.copyfile(src_analysis_cache, src_analysis_cache_tmp)
            src_analysis_caches.append(src_analysis_cache_tmp)
            if self._zinc_utils.run_zinc_rebase(cache=src_analysis_cache_tmp, rebasings=[(src_output_dir, dst_output_dir)]):
              self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. ' \
                                    'Target may require a full rebuild.' % \
                                    src_analysis_cache_tmp)

      dst_deps.save(dst_depfile)

      if self._zinc_utils.run_zinc_merge(src_caches=src_analysis_caches, dst_cache=dst_analysis_cache):
        self.context.log.warn('zinc failed to merge analysis files %s to %s. ' \
                              'Target may require a full rebuild.' % \
                              (':'.join(src_analysis_caches), dst_analysis_cache))
Example No. 30
 def _ensure_analysis_tmpdir(self):
   # Do this lazily, so we don't trigger creation of a worker pool unless we need it.
   if not os.path.exists(self._analysis_tmpdir):
     os.makedirs(self._analysis_tmpdir)
     if self._delete_scratch:
       self.context.background_worker_pool().add_shutdown_hook(
           lambda: safe_rmtree(self._analysis_tmpdir))
Example No. 31
 def _ensure_analysis_tmpdir(self):
     # Do this lazily, so we don't trigger creation of a worker pool unless we need it.
     if not os.path.exists(self._analysis_tmpdir):
         os.makedirs(self._analysis_tmpdir)
         if self._delete_scratch:
             self.context.background_worker_pool().add_shutdown_hook(
                 lambda: safe_rmtree(self._analysis_tmpdir))
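The lazy creation plus shutdown-hook cleanup above can be approximated without a worker pool by registering an atexit handler; a minimal sketch (the path and flag are illustrative):

import atexit
import os
import shutil

def ensure_tmpdir(path, delete_on_exit=True):
    # Create the scratch dir only on first use and register cleanup once.
    if not os.path.exists(path):
        os.makedirs(path)
        if delete_on_exit:
            atexit.register(lambda: shutil.rmtree(path, ignore_errors=True))
    return path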
Example No. 32
def initial_reporting(config, run_tracker):
  """Sets up the initial reporting configuration.

  Will be changed after we parse cmd-line flags.
  """
  reports_dir = config.get('reporting', 'reports_dir',
                           default=os.path.join(config.getdefault('pants_workdir'), 'reports'))
  link_to_latest = os.path.join(reports_dir, 'latest')
  if os.path.exists(link_to_latest):
    os.unlink(link_to_latest)

  run_id = run_tracker.run_info.get_info('id')
  if run_id is None:
    raise ReportingError('No run_id set')
  run_dir = os.path.join(reports_dir, run_id)
  safe_rmtree(run_dir)

  html_dir = os.path.join(run_dir, 'html')
  safe_mkdir(html_dir)
  os.symlink(run_dir, link_to_latest)

  report = Report()

  # Capture initial console reporting into a buffer. We'll do something with it once
  # we know what the cmd-line flag settings are.
  outfile = StringIO()
  capturing_reporter_settings = PlainTextReporter.Settings(outfile=outfile, log_level=Report.INFO,
                                                           color=False, indent=True, timing=False,
                                                           cache_stats=False)
  capturing_reporter = PlainTextReporter(run_tracker, capturing_reporter_settings)
  report.add_reporter('capturing', capturing_reporter)

  # Set up HTML reporting. We always want that.
  template_dir = config.get('reporting', 'reports_template_dir')
  html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO,
                                                 html_dir=html_dir,
                                                 template_dir=template_dir)
  html_reporter = HtmlReporter(run_tracker, html_reporter_settings)
  report.add_reporter('html', html_reporter)

  # Add some useful RunInfo.
  run_tracker.run_info.add_info('default_report', html_reporter.report_path())
  port = ReportingServerManager.get_current_server_port()
  if port:
    run_tracker.run_info.add_info('report_url', 'http://localhost:%d/run/%s' % (port, run_id))

  return report
Example No. 33
def setup_tree(td, lose=False):
  safe_rmtree(td)

  # TODO(wickman) These should be referred as resources= in the python_target instead.
  shutil.copytree('src/test/resources/org/apache/thermos/root', td)

  if lose:
    lost_age = time.time() - (
      2 * ThinTestThermosGCExecutor.MAX_CHECKPOINT_TIME_DRIFT.as_(Time.SECONDS))
    utime = (lost_age, lost_age)
  else:
    utime = None

  # touch everything
  for root, dirs, files in os.walk(td):
    for fn in files:
      os.utime(os.path.join(root, fn), utime)
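setup_tree leans on the two behaviors of os.utime: passing None touches a file to the current time, while an (atime, mtime) tuple backdates it, which is how the lose branch makes checkpoints look stale. For reference:

import os
import time

def backdate(path, seconds):
    # os.utime(path, None) sets atime/mtime to now; a tuple sets them explicitly.
    stamp = time.time() - seconds
    os.utime(path, (stamp, stamp))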
Example No. 34
    def execute(self):
        config = Config.load()
        distdir = config.getdefault('pants_distdir')
        setup_dir = os.path.join(
            distdir, '%s-%s' %
            (self.target.provides._name, self.target.provides._version))
        chroot = Chroot(distdir, name=self.target.provides._name)
        self.write_sources(chroot)
        self.write_setup(chroot)
        if os.path.exists(setup_dir):
            import shutil
            shutil.rmtree(setup_dir)
        os.rename(chroot.path(), setup_dir)

        with pushd(setup_dir):
            cmd = '%s setup.py %s' % (sys.executable, self.options.run
                                      or 'sdist')
            print('Running "%s" in %s' % (cmd, setup_dir))
            extra_args = {} if self.options.run else dict(
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            po = subprocess.Popen(cmd, shell=True, **extra_args)
            po.wait()

        if self.options.run:
            print('Ran %s' % cmd)
            print('Output in %s' % setup_dir)
            return po.returncode
        elif po.returncode != 0:
            print('Failed to run %s!' % cmd)
            for line in po.stdout.read().splitlines():
                print('stdout: %s' % line)
            for line in po.stderr.read().splitlines():
                print('stderr: %s' % line)
            return po.returncode

        expected_tgz = '%s-%s.tar.gz' % (self.target.provides._name,
                                         self.target.provides._version)
        expected_target = os.path.join(setup_dir, 'dist', expected_tgz)
        dist_tgz = os.path.join(distdir, expected_tgz)
        if not os.path.exists(expected_target):
            print('Could not find expected target %s!' % expected_target)
            sys.exit(1)
        safe_delete(dist_tgz)
        os.rename(expected_target, dist_tgz)
        print('Wrote %s' % dist_tgz)
        safe_rmtree(setup_dir)
Example No. 35
def setup_tree(td, lose=False):
  safe_rmtree(td)
  
  # TODO(wickman) These should be referred as resources= in the python_target instead.
  shutil.copytree('src/resources/org/apache/thermos/root', td)

  if lose:
    lost_age = time.time() - (
      2 * ThinTestThermosGCExecutor.MAX_CHECKPOINT_TIME_DRIFT.as_(Time.SECONDS))
    utime = (lost_age, lost_age)
  else:
    utime = None

  # touch everything
  for root, dirs, files in os.walk(td):
    for fn in files:
      os.utime(os.path.join(root, fn), utime)
Example No. 36
 def locally_cache(dist, cache_dir):
   from pkg_resources import PathMetadata, Distribution
   from twitter.common.dirutil import safe_rmtree, safe_open, safe_mkdir
   egg_name = os.path.join(cache_dir, dist.egg_name() + '.egg')
   safe_mkdir(cache_dir)
   if not os.path.exists(egg_name):
     egg_tmp_path = tempfile.mkdtemp(dir=cache_dir, prefix=dist.egg_name())
     for fn, content in DistributionHelper.walk(dist):
       with safe_open(os.path.join(egg_tmp_path, fn), 'wb') as fp:
         fp.write(content)
     try:
       os.rename(egg_tmp_path, egg_name)
     except OSError as e:
       # Handle the race condition of other people trying to write into the target cache.
       if e.errno == errno.ENOTEMPTY:
         safe_rmtree(egg_tmp_path)
   metadata = PathMetadata(egg_name, os.path.join(egg_name, 'EGG-INFO'))
   return Distribution.from_filename(egg_name, metadata=metadata)
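locally_cache publishes into the cache atomically: it writes into a mkdtemp directory inside the cache dir, renames it into place, and treats ENOTEMPTY as losing the race to another writer. A stripped-down sketch of that pattern (write_fn stands in for the extraction loop):

import errno
import os
import shutil
import tempfile

def publish_atomically(cache_dir, name, write_fn):
    final_path = os.path.join(cache_dir, name)
    if os.path.exists(final_path):
        return final_path
    # Stage into a temp dir on the same filesystem so the rename is atomic.
    tmp_path = tempfile.mkdtemp(dir=cache_dir, prefix=name + '.')
    write_fn(tmp_path)
    try:
        os.rename(tmp_path, final_path)
    except OSError as e:
        # Another writer already renamed its copy into place; discard ours.
        # (Renaming onto an existing non-empty directory raises ENOTEMPTY on
        # Linux, EEXIST on some other platforms.)
        if e.errno in (errno.ENOTEMPTY, errno.EEXIST):
            shutil.rmtree(tmp_path, ignore_errors=True)
        else:
            raise
    return final_path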
Example No. 37
File: util.py Project: xianxu/pants
 def maybe_locally_cache(dist, cache_dir):
     from pkg_resources import PathMetadata, Distribution
     from twitter.common.dirutil import safe_rmtree, safe_open, safe_mkdir
     egg_name = os.path.join(cache_dir, dist.egg_name() + '.egg')
     safe_mkdir(cache_dir)
     if not os.path.exists(egg_name):
         egg_tmp_path = tempfile.mkdtemp(dir=cache_dir,
                                         prefix=dist.egg_name())
         for fn, content in DistributionHelper.walk(dist):
             with safe_open(os.path.join(egg_tmp_path, fn), 'wb') as fp:
                 fp.write(content)
         try:
             os.rename(egg_tmp_path, egg_name)
         except OSError as e:
             # Handle the race condition of other people trying to write into the target cache.
             if e.errno == errno.ENOTEMPTY:
                 safe_rmtree(egg_tmp_path)
     metadata = PathMetadata(egg_name, os.path.join(egg_name, 'EGG-INFO'))
     return Distribution.from_filename(egg_name, metadata=metadata)
Example No. 38
  def execute(self):
    dist_dir = self._config.getdefault('pants_distdir')
    target_base = '%s-%s' % (
        self.target.provides.name, self.target.provides.version)
    setup_dir = os.path.join(dist_dir, target_base)
    expected_tgz = '%s.tar.gz' % target_base
    expected_target = os.path.join(setup_dir, 'dist', expected_tgz)
    dist_tgz = os.path.join(dist_dir, expected_tgz)

    chroot = Chroot(dist_dir, name=self.target.provides.name)
    self.write_contents(chroot)
    self.write_setup(chroot)
    safe_rmtree(setup_dir)
    os.rename(chroot.path(), setup_dir)

    with pushd(setup_dir):
      cmd = '%s setup.py %s' % (sys.executable, self.options.run or 'sdist')
      print('Running "%s" in %s' % (cmd, setup_dir))
      extra_args = {} if self.options.run else dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      po = subprocess.Popen(cmd, shell=True, **extra_args)
      stdout, stderr = po.communicate()

    if self.options.run:
      print('Ran %s' % cmd)
      print('Output in %s' % setup_dir)
      return po.returncode
    elif po.returncode != 0:
      print('Failed to run %s!' % cmd)
      for line in ''.join(stdout).splitlines():
        print('stdout: %s' % line)
      for line in ''.join(stderr).splitlines():
        print('stderr: %s' % line)
      return po.returncode
    else:
      if not os.path.exists(expected_target):
        print('Could not find expected target %s!' % expected_target)
        sys.exit(1)

      safe_delete(dist_tgz)
      os.rename(expected_target, dist_tgz)
      safe_rmtree(setup_dir)

      print('Wrote %s' % dist_tgz)
Example No. 39
  def split_artifact(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return
    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)
    analysis_splits = []  # List of triples of (list of sources, destination output dir, destination analysis cache).
    # for dependency analysis, we need to record the cache files that we create in the split

    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
      safe_rmtree(dst_output_dir)
      safe_mkdir(dst_output_dir)

      sources = []
      dst_deps = Dependencies(dst_output_dir)

      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
        sources.append(os.path.join(target.target_base, source))
        for cls in classes:
          # Copy the class file.
          dst = os.path.join(dst_output_dir, cls)
          safe_mkdir(os.path.dirname(dst))
          os.link(os.path.join(src_output_dir, cls), dst)
      dst_deps.save(dst_depfile)
      analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
      self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))
    # Split the analysis files.
    if os.path.exists(src_analysis_cache):
      if self._zinc_utils.run_zinc_split(src_cache=src_analysis_cache, splits=[(x[0], x[2]) for x in analysis_splits]):
        raise TaskError, 'zinc failed to split analysis files %s from %s' %\
                         (':'.join([x[2] for x in analysis_splits]), src_analysis_cache)

      # Now rebase the newly created analysis files.
      for split in analysis_splits:
        dst_analysis_cache = split[2]
        dst_output_dir = split[1]
        if os.path.exists(dst_analysis_cache):
          rebasings = [(src_output_dir, dst_output_dir)]
          if self._zinc_utils.run_zinc_rebase(cache=dst_analysis_cache, rebasings=rebasings):
            raise TaskError, 'In split_artifact: zinc failed to rebase analysis file %s' % dst_analysis_cache
Example No. 40
def default_report(config, run_tracker):
    """Sets up the default reporting configuration."""
    reports_dir = config.get('reporting', 'reports_dir')
    link_to_latest = os.path.join(reports_dir, 'latest')
    if os.path.exists(link_to_latest):
        os.unlink(link_to_latest)

    run_id = run_tracker.run_info.get_info('id')
    if run_id is None:
        raise ReportingError('No run_id set')
    run_dir = os.path.join(reports_dir, run_id)
    safe_rmtree(run_dir)

    html_dir = os.path.join(run_dir, 'html')
    safe_mkdir(html_dir)
    os.symlink(run_dir, link_to_latest)

    report = Report()

    console_reporter_settings = ConsoleReporter.Settings(log_level=Report.INFO,
                                                         color=False,
                                                         indent=True,
                                                         timing=False,
                                                         cache_stats=False)
    console_reporter = ConsoleReporter(run_tracker, console_reporter_settings)

    template_dir = config.get('reporting', 'reports_template_dir')
    html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO,
                                                   html_dir=html_dir,
                                                   template_dir=template_dir)
    html_reporter = HtmlReporter(run_tracker, html_reporter_settings)

    report.add_reporter('console', console_reporter)
    report.add_reporter('html', html_reporter)

    run_tracker.run_info.add_info('default_report',
                                  html_reporter.report_path())
    port = config.getint('reporting', 'reporting_port', -1)
    run_tracker.run_info.add_info(
        'report_url', 'http://localhost:%d/run/%s' % (port, run_id))

    return report
Example No. 41
  def execute(self):
    config = Config.load()
    distdir = config.getdefault('pants_distdir')
    setup_dir = os.path.join(distdir, '%s-%s' % (
        self.target.provides._name, self.target.provides._version))
    chroot = Chroot(distdir, name=self.target.provides._name)
    self.write_sources(chroot)
    self.write_setup(chroot)
    if os.path.exists(setup_dir):
      import shutil
      shutil.rmtree(setup_dir)
    os.rename(chroot.path(), setup_dir)

    with pushd(setup_dir):
      cmd = '%s setup.py %s' % (sys.executable, self.options.run or 'sdist')
      print('Running "%s" in %s' % (cmd, setup_dir))
      extra_args = {} if self.options.run else dict(stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      po = subprocess.Popen(cmd, shell=True, **extra_args)
      po.wait()

    if self.options.run:
      print('Ran %s' % cmd)
      print('Output in %s' % setup_dir)
      return po.returncode
    elif po.returncode != 0:
      print('Failed to run %s!' % cmd)
      for line in po.stdout.read().splitlines():
        print('stdout: %s' % line)
      for line in po.stderr.read().splitlines():
        print('stderr: %s' % line)
      return po.returncode

    expected_tgz = '%s-%s.tar.gz' % (self.target.provides._name, self.target.provides._version)
    expected_target = os.path.join(setup_dir, 'dist', expected_tgz)
    dist_tgz = os.path.join(distdir, expected_tgz)
    if not os.path.exists(expected_target):
      print('Could not find expected target %s!' % expected_target)
      sys.exit(1)
    safe_delete(dist_tgz)
    os.rename(expected_target, dist_tgz)
    print('Wrote %s' % dist_tgz)
    safe_rmtree(setup_dir)
Example No. 42
    def run_one(self, target):
        dist_dir = self._config.getdefault('pants_distdir')
        chroot = Chroot(dist_dir, name=target.provides.name)
        self.write_contents(target, chroot)
        self.write_setup(target, chroot)
        target_base = '%s-%s' % (target.provides.name, target.provides.version)
        setup_dir = os.path.join(dist_dir, target_base)
        safe_rmtree(setup_dir)
        os.rename(chroot.path(), setup_dir)

        if not self.options.run:
            print('Running packager against %s' % setup_dir)
            setup_runner = Packager(setup_dir)
            tgz_name = os.path.basename(setup_runner.sdist())
            print('Writing %s' % os.path.join(dist_dir, tgz_name))
            os.rename(setup_runner.sdist(), os.path.join(dist_dir, tgz_name))
            safe_rmtree(setup_dir)
        else:
            print('Running %s against %s' % (self.options.run, setup_dir))
            setup_runner = SetupPyRunner(setup_dir, self.options.run)
            setup_runner.run()
Example No. 43
  def run_one(self, target):
    dist_dir = self._config.getdefault('pants_distdir')
    chroot = Chroot(dist_dir, name=target.provides.name)
    self.write_contents(target, chroot)
    self.write_setup(target, chroot)
    target_base = '%s-%s' % (target.provides.name, target.provides.version)
    setup_dir = os.path.join(dist_dir, target_base)
    safe_rmtree(setup_dir)
    os.rename(chroot.path(), setup_dir)

    if not self.options.run:
      print('Running packager against %s' % setup_dir)
      setup_runner = Packager(setup_dir)
      tgz_name = os.path.basename(setup_runner.sdist())
      print('Writing %s' % os.path.join(dist_dir, tgz_name))
      os.rename(setup_runner.sdist(), os.path.join(dist_dir, tgz_name))
      safe_rmtree(setup_dir)
    else:
      print('Running %s against %s' % (self.options.run, setup_dir))
      setup_runner = SetupPyRunner(setup_dir, self.options.run)
      setup_runner.run()
Example No. 44
  def translate(self, link):
    """From a link, translate a distribution."""
    if not isinstance(link, SourceLink):
      return None

    unpack_path, installer = None, None
    try:
      unpack_path = link.fetch(conn_timeout=self._conn_timeout)
      with TRACER.timed('Installing %s' % link.name):
        installer = Installer(unpack_path, strict=(link.name != 'distribute'))
      with TRACER.timed('Distilling %s' % link.name):
        try:
          dist = installer.distribution()
        except Installer.InstallFailure as e:
          return None
        return dist_from_egg(Distiller(dist).distill(into=self._install_cache))
    finally:
      if installer:
        installer.cleanup()
      if unpack_path:
        safe_rmtree(unpack_path)
Example No. 45
    def _merge_classes_dir(self, state):
        """Merge the classes dirs from the underlying artifacts into a single dir.

    May symlink instead of copying, when it's OK to do so.

    Postcondition: symlinks are of leaf packages only.
    """
        self.log.debug('Merging classes dirs into %s' % self.classes_dir)
        safe_rmtree(self.classes_dir)
        symlinkable_packages = self._symlinkable_packages(state)
        for artifact in self.underlying_artifacts:
            classnames_by_package = defaultdict(list)
            for cls in state.classes_by_target.get(artifact.targets[0], []):
                classnames_by_package[os.path.dirname(cls)].append(
                    os.path.basename(cls))

            for package, classnames in classnames_by_package.items():
                if package == "":
                    raise TaskError("Found class files %s with empty package" %
                                    classnames)
                artifact_package_dir = os.path.join(artifact.classes_dir,
                                                    package)
                merged_package_dir = os.path.join(self.classes_dir, package)

                if package in symlinkable_packages:
                    if os.path.islink(merged_package_dir):
                        assert os.readlink(
                            merged_package_dir) == artifact_package_dir
                    elif os.path.exists(merged_package_dir):
                        safe_rmtree(merged_package_dir)
                        os.symlink(artifact_package_dir, merged_package_dir)
                    else:
                        safe_mkdir(os.path.dirname(merged_package_dir))
                        os.symlink(artifact_package_dir, merged_package_dir)
                else:
                    safe_mkdir(merged_package_dir)
                    for classname in classnames:
                        src = os.path.join(artifact_package_dir, classname)
                        dst = os.path.join(merged_package_dir, classname)
                        self._maybe_hardlink(src, dst)
Example No. 46
def test_mkdtemp_setup_teardown():
    m = mox.Mox()

    def faux_cleaner():
        pass

    DIR1, DIR2 = 'fake_dir1__does_not_exist', 'fake_dir2__does_not_exist'
    m.StubOutWithMock(atexit, 'register')
    m.StubOutWithMock(os, 'getpid')
    m.StubOutWithMock(tempfile, 'mkdtemp')
    m.StubOutWithMock(dirutil, 'safe_rmtree')
    atexit.register(faux_cleaner)  # ensure only called once
    tempfile.mkdtemp(dir='1').AndReturn(DIR1)
    tempfile.mkdtemp(dir='2').AndReturn(DIR2)
    os.getpid().MultipleTimes().AndReturn('unicorn')
    dirutil.safe_rmtree(DIR1)
    dirutil.safe_rmtree(DIR2)
    # make sure other "pids" are not cleaned
    dirutil._MKDTEMP_DIRS['fluffypants'].add('yoyo')

    try:
        m.ReplayAll()
        assert dirutil.safe_mkdtemp(dir='1', cleaner=faux_cleaner) == DIR1
        assert dirutil.safe_mkdtemp(dir='2', cleaner=faux_cleaner) == DIR2
        assert 'unicorn' in dirutil._MKDTEMP_DIRS
        assert dirutil._MKDTEMP_DIRS['unicorn'] == set([DIR1, DIR2])
        dirutil._mkdtemp_atexit_cleaner()
        assert 'unicorn' not in dirutil._MKDTEMP_DIRS
        assert dirutil._MKDTEMP_DIRS['fluffypants'] == set(['yoyo'])

    finally:
        dirutil._MKDTEMP_DIRS.pop('unicorn', None)
        dirutil._MKDTEMP_DIRS.pop('fluffypants', None)
        dirutil._mkdtemp_unregister_cleaner()

        m.UnsetStubs()
        m.VerifyAll()
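
For context, a minimal sketch of the behaviour this test exercises, inferred only from the assertions above (the real dirutil implementation is not shown here): temp dirs are tracked per pid, the atexit cleaner is registered once, and only the current pid's dirs are removed.

import atexit
import os
import shutil
import tempfile
from collections import defaultdict

_MKDTEMP_DIRS = defaultdict(set)   # pid -> temp dirs created by that pid
_CLEANER_REGISTERED = False


def _mkdtemp_atexit_cleaner():
    # Remove only the dirs created by this process; other pids' entries are left alone.
    for path in _MKDTEMP_DIRS.pop(os.getpid(), ()):
        shutil.rmtree(path, ignore_errors=True)   # stand-in for safe_rmtree


def safe_mkdtemp(cleaner=_mkdtemp_atexit_cleaner, **kw):
    global _CLEANER_REGISTERED
    if not _CLEANER_REGISTERED:
        atexit.register(cleaner)   # registered once per process
        _CLEANER_REGISTERED = True
    path = tempfile.mkdtemp(**kw)
    _MKDTEMP_DIRS[os.getpid()].add(path)
    return path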
Exemplo n.º 48
0
    def insert(self, cache_key, build_artifacts, artifact_root=None):
        """Cache the output of a build.

    If there is an existing set of artifacts for this key they are deleted.

    TODO: Check that they're equal? If they aren't it's a grave bug, since the key is supposed
    to be a fingerprint of all possible inputs to the build.

    :param cache_key: A CacheKey object.
    :param build_artifacts: List of paths to generated artifacts under artifact_root.
    :param artifact_root: Optional root directory under which artifacts are stored.
    """
        cache_dir = self._cache_dir_for_key(cache_key)
        try:
            safe_rmtree(cache_dir)
            for artifact in build_artifacts or ():
                rel_path = (os.path.basename(artifact) if artifact_root is None
                            else os.path.relpath(artifact, artifact_root))
                assert not rel_path.startswith('..'), \
                    'Weird: artifact=%s, rel_path=%s' % (artifact, rel_path)
                artifact_dest = os.path.join(cache_dir, rel_path)
                dir_name = os.path.dirname(artifact_dest)
                safe_mkdir(dir_name)
                if os.path.isdir(artifact):
                    shutil.copytree(artifact, artifact_dest)
                else:
                    shutil.copy(artifact, artifact_dest)
        except Exception:
            try:
                # Remove the half-written cache entry so it cannot be mistaken for a valid one.
                safe_rmtree(cache_dir)
            except Exception:
                print(
                    'IMPORTANT: failed to delete %s on error. Your artifact cache may be corrupted. '
                    'Please delete manually.' % cache_dir)
            # Re-raise the original exception with its traceback intact.
            raise
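
A hypothetical usage sketch; the cache object, key, and artifact paths below are assumed names, relying only on the insert() signature documented above:

# Copy two generated files into the cache entry for 'key', keeping their
# paths relative to the 'dist' directory.
cache.insert(key, ['dist/classes/Foo.class', 'dist/classes/Bar.class'],
             artifact_root='dist')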
Exemplo n.º 49
0
def default_report(config, run_tracker):
  """Sets up the default reporting configuration."""
  reports_dir = config.get('reporting', 'reports_dir')
  link_to_latest = os.path.join(reports_dir, 'latest')
  if os.path.exists(link_to_latest):
    os.unlink(link_to_latest)

  run_id = run_tracker.run_info.get_info('id')
  if run_id is None:
    raise ReportingError('No run_id set')
  run_dir = os.path.join(reports_dir, run_id)
  safe_rmtree(run_dir)

  html_dir = os.path.join(run_dir, 'html')
  safe_mkdir(html_dir)
  os.symlink(run_dir, link_to_latest)

  report = Report()

  console_reporter_settings = ConsoleReporter.Settings(log_level=Report.INFO, color=False,
                                                       indent=True, timing=False, cache_stats=False)
  console_reporter = ConsoleReporter(run_tracker, console_reporter_settings)

  template_dir = config.get('reporting', 'reports_template_dir')
  html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO,
                                                 html_dir=html_dir, template_dir=template_dir)
  html_reporter = HtmlReporter(run_tracker, html_reporter_settings)

  report.add_reporter('console', console_reporter)
  report.add_reporter('html', html_reporter)

  run_tracker.run_info.add_info('default_report', html_reporter.report_path())
  port = config.getint('reporting', 'reporting_port', -1)
  run_tracker.run_info.add_info('report_url', 'http://localhost:%d/run/%s' % (port, run_id))

  return report
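
A hypothetical call sketch; config and run_tracker are assumed to be already-constructed objects, and only what default_report() does above is relied on:

report = default_report(config, run_tracker)
# The returned Report has a console and an HTML reporter attached, and run_info
# now carries the 'default_report' and 'report_url' entries set above.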
Exemplo n.º 50
0
 def destroy(self):
   safe_rmtree(self._chroot)
Exemplo n.º 51
0
from twitter.pants.tasks.scala_compile import ScalaCompile
from twitter.pants.tasks.scala_repl import ScalaRepl
from twitter.pants.tasks.specs_run import SpecsRun
from twitter.pants.tasks.thrift_gen import ThriftGen
from twitter.pants.tasks.thriftstore_dml_gen import ThriftstoreDMLGen


class Invalidator(Task):
  def execute(self, targets):
    self.invalidate(all=True)
goal(name='invalidate', action=Invalidator).install().with_description('Invalidate all caches')


goal(
  name='clean-all',
  action=lambda ctx: safe_rmtree(ctx.config.getdefault('pants_workdir')),
  dependencies=['invalidate']
).install().with_description('Cleans all intermediate build output')


if NailgunTask.killall:
  class NailgunKillall(Task):
    @classmethod
    def setup_parser(cls, option_group, args, mkflag):
      option_group.add_option(mkflag("everywhere"), dest="ng_killall_evywhere",
                              default=False, action="store_true",
                              help="[%default] Kill all nailguns servers launched by pants for "
                                   "all workspaces on the system.")

    def execute(self, targets):
      if NailgunTask.killall:
Exemplo n.º 52
0
 def tearDownClass(cls):
     safe_rmtree(cls.origin)
     safe_rmtree(cls.gitdir)
     safe_rmtree(cls.worktree)
     safe_rmtree(cls.clone2)
Exemplo n.º 53
0
 def __del__(self):
     if os.getenv('PANTS_LEAVE_CHROOT') is None:
         safe_rmtree(self.path())
Exemplo n.º 54
0
    def report(self, targets, tests, junit_classpath):
        # Link files in the real source tree to files named using the classname.
        # Do not include class file names containing '$', as these will always have
        # a corresponding $-less class file, and they all point back to the same
        # source.
        # Put all these links to sources under self._coverage_dir/src
        all_classes = set()
        for basedir, classes in self._rootdirs.items():
            all_classes.update([cls for cls in classes if '$' not in cls])
        sources_by_class = self._build_sources_by_class()
        coverage_source_root_dir = os.path.join(self._coverage_dir, 'src')
        safe_rmtree(coverage_source_root_dir)
        for cls in all_classes:
            source_file = sources_by_class.get(cls)
            if source_file:
                # the class in @cls
                #    (e.g., 'com/pants/example/hello/welcome/WelcomeEverybody.class')
                # was compiled from the file in @source_file
                #    (e.g., 'src/scala/com/pants/example/hello/welcome/Welcome.scala')
                # Note that, in the case of scala files, the path leading up to Welcome.scala does not
                # have to match the path in the corresponding .class file AT ALL. In this example,
                # @source_file could very well have been 'src/hello-kitty/Welcome.scala'.
                # However, cobertura expects the class file path to match the corresponding source
                # file path below the source base directory(ies) (passed as (a) positional argument(s)),
                # while it still gets the source file basename from the .class file.
                # Here we create a fake hierarchy under coverage_dir/src to mimic what cobertura expects.

                class_dir = os.path.dirname(
                    cls)  # e.g., 'com/pants/example/hello/welcome'
                fake_source_directory = os.path.join(coverage_source_root_dir,
                                                     class_dir)
                safe_mkdir(fake_source_directory)
                fake_source_file = os.path.join(fake_source_directory,
                                                os.path.basename(source_file))
                try:
                    os.symlink(
                        os.path.relpath(source_file, fake_source_directory),
                        fake_source_file)
                except OSError as e:
                    # These warnings appear when source files contain multiple classes.
                    self._context.log.warn('Could not symlink %s to %s: %s' %
                                           (source_file, fake_source_file, e))
            else:
                self._context.log.error(
                    'class %s does not exist in a source file!' % cls)
        report_formats = []
        if self._coverage_report_xml:
            report_formats.append('xml')
        if self._coverage_report_html:
            report_formats.append('html')
        for report_format in report_formats:
            report_dir = os.path.join(self._coverage_dir, report_format)
            safe_mkdir(report_dir, clean=True)
            args = [
                coverage_source_root_dir,
                '--datafile',
                self._coverage_datafile,
                '--destination',
                report_dir,
                '--format',
                report_format,
            ]
            main = 'net.sourceforge.cobertura.reporting.ReportMain'
            result = execute_java(classpath=self._cobertura_classpath,
                                  main=main,
                                  args=args,
                                  workunit_factory=self._context.new_workunit,
                                  workunit_name='cobertura-report-' +
                                  report_format)
            if result != 0:
                raise TaskError("java %s ... exited non-zero (%i)"
                                " 'failed to report'" % (main, result))
Exemplo n.º 55
0
 def teardown_class(cls):
     if 'THERMOS_DEBUG' not in os.environ:
         safe_rmtree(cls.LOG_DIR)
     else:
         print('Saving executor logs in %s' % cls.LOG_DIR)
Exemplo n.º 56
0
 def invalidate(self, all=False):
   safe_rmtree(self._build_cache if all else self._basedir)
Exemplo n.º 57
0
    def _split_classes_dir(self, state, diff):
        """Split the merged classes dir into one dir per underlying artifact."""
        if len(self.underlying_artifacts) <= 1:
            return

        def map_classes_by_package(classes):
            # E.g., com/foo/bar/Bar.scala, com/foo/bar/Baz.scala to com/foo/bar -> [Bar.scala, Baz.scala].
            ret = defaultdict(list)
            for cls in classes:
                ret[os.path.dirname(cls)].append(os.path.basename(cls))
            return ret

        self.log.debug('Splitting classes dir %s' % self.classes_dir)
        if diff:
            new_or_changed_classnames_by_package = map_classes_by_package(
                diff.new_or_changed_classes)
            deleted_classnames_by_package = map_classes_by_package(
                diff.deleted_classes)
        else:
            new_or_changed_classnames_by_package = None
            deleted_classnames_by_package = None

        symlinkable_packages = self._symlinkable_packages(state)
        for artifact in self.underlying_artifacts:
            classnames_by_package = \
              map_classes_by_package(state.classes_by_target.get(artifact.targets[0], []))

            for package, classnames in classnames_by_package.items():
                artifact_package_dir = os.path.join(artifact.classes_dir,
                                                    package)
                merged_package_dir = os.path.join(self.classes_dir, package)

                if package in symlinkable_packages:
                    if os.path.islink(merged_package_dir):
                        current_link = os.readlink(merged_package_dir)
                        if current_link != artifact_package_dir:
                            # The code moved to a different target.
                            os.unlink(merged_package_dir)
                            safe_rmtree(artifact_package_dir)
                            shutil.move(current_link, artifact_package_dir)
                            os.symlink(artifact_package_dir,
                                       merged_package_dir)
                    else:
                        safe_rmtree(artifact_package_dir)
                        shutil.move(merged_package_dir, artifact_package_dir)
                        os.symlink(artifact_package_dir, merged_package_dir)
                else:
                    safe_mkdir(artifact_package_dir)
                    new_or_changed_classnames = \
                      set(new_or_changed_classnames_by_package.get(package, [])) if diff else None
                    for classname in classnames:
                        if not diff or classname in new_or_changed_classnames:
                            src = os.path.join(merged_package_dir, classname)
                            dst = os.path.join(artifact_package_dir, classname)
                            self._maybe_hardlink(src, dst)
                    if diff:
                        for classname in deleted_classnames_by_package.get(
                                package, []):
                            path = os.path.join(artifact_package_dir,
                                                classname)
                            if os.path.exists(path):
                                os.unlink(path)
Exemplo n.º 58
0
 def erase_metadata(self, task_id):
     for fn in self.get_metadata(task_id, with_size=False):
         safe_delete(fn)
     safe_rmtree(
         TaskPath(root=self._root,
                  task_id=task_id).getpath('checkpoint_path'))
Exemplo n.º 59
0
def install_virtualenv(context, interpreter):
  virtualenv_cache = context.config.get('python-setup', 'bootstrap_cache')
  virtualenv_target = context.config.get('python-setup', 'virtualenv_target')
  pip_repos = context.config.getlist('python-setup', 'repos')
  if not os.path.exists(virtualenv_target):
    raise TaskError('Could not find installed virtualenv!')

  env_base = context.config.getdefault('pants_pythons')

  # setup $PYTHONS/bin/INTERPRETER => interpreter.binary
  env_bin = os.path.join(env_base, 'bin')
  safe_mkdir(env_bin)
  link_target = os.path.join(env_bin, str(interpreter.identity()))
  if os.path.exists(link_target):
    os.unlink(link_target)
  os.symlink(interpreter.binary(), link_target)

  # create actual virtualenv that can be used for synthesis of pants pex
  environment_install_path = os.path.join(env_base, str(interpreter.identity()))
  virtualenv_py = os.path.join(virtualenv_target, 'virtualenv.py')
  python_interpreter = interpreter.binary()

  if os.path.exists(os.path.join(environment_install_path, 'bin', 'python')) and (
     not context.options.setup_python_force):
    return True
  else:
    safe_rmtree(environment_install_path)
    safe_mkdir(environment_install_path)

  cmdline = '%s %s --distribute %s' % (
      python_interpreter,
      virtualenv_py,
      environment_install_path)
  context.log.info('Setting up %s...' % interpreter.identity())
  context.log.debug('Running %s' % cmdline)

  rc, stdout, stderr = subprocess_call(cmdline)
  if rc != 0:
    context.log.warn('Failed to install virtualenv: err=%s' % stderr)
    context.log.info('Cleaning up %s' % interpreter.identity())
    safe_rmtree(environment_install_path)
    raise TaskError('Could not install virtualenv for %s' % interpreter.identity())

  def install_package(pkg):
    INSTALL_VIRTUALENV_PACKAGE = """
      source %(environment)s/bin/activate
      %(environment)s/bin/pip install --download-cache=%(cache)s \
         %(f_repositories)s --no-index -U %(package)s
    """ % {
      'environment': environment_install_path,
      'cache': virtualenv_cache,
      'f_repositories': ' '.join('-f %s' % repository for repository in pip_repos),
      'package': pkg
    }
    rc, stdout, stderr = subprocess_call(INSTALL_VIRTUALENV_PACKAGE)
    if rc != 0:
      context.log.warn('Failed to install %s' % pkg)
      context.log.debug('Stdout:\n%s\nStderr:\n%s\n' % (stdout, stderr))
    return rc == 0

  for package in context.config.getlist('python-setup', 'bootstrap_packages'):
    context.log.debug('Installing %s into %s' % (package, interpreter.identity()))
    if not install_package(package):
      context.log.warn('Failed to install %s into %s!' % (package, interpreter.identity()))
  return True
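
The subprocess_call helper used above is not shown in this example; a minimal sketch, assuming only the (returncode, stdout, stderr) shape that the callers above unpack:

import subprocess

def subprocess_call(cmdline):
  # Run the command through a shell and return (returncode, stdout, stderr).
  proc = subprocess.Popen(cmdline, shell=True,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  stdout, stderr = proc.communicate()
  return proc.returncode, stdout, stderr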