Code example #1
    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 platforms=None,
                 interpreter=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._platforms = platforms
        self._interpreter = interpreter or PythonInterpreter.get()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._builder = builder or PEXBuilder(tempfile.mkdtemp(),
                                              interpreter=self._interpreter)

        # Note: unrelated to the general pants artifact cache.
        self._egg_cache_root = os.path.join(
            PythonSetup(self._config).scratch_dir('artifact_cache',
                                                  default_name='artifacts'),
            str(self._interpreter.identity))

        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._egg_cache_root)
Code example #2
    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 interpreter=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._interpreter = interpreter or PythonInterpreter.get()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = MultiResolver(self._config,
                                       target,
                                       conn_timeout=conn_timeout)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp(),
                                              interpreter=self._interpreter)

        # Note: unrelated to the general pants artifact cache.
        self._egg_cache_root = os.path.join(
            self._config.get('python-setup', 'artifact_cache'),
            str(self._interpreter.identity))

        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._egg_cache_root)
Code example #3
File: cache_manager.py Project: bonifaido/commons
  def __init__(self, cache_manager, targets, per_target_cache_keys):
    self._cache_manager = cache_manager
    self.per_target_cache_keys = per_target_cache_keys

    self.targets = targets
    self.cache_key = CacheKeyGenerator.combine_cache_keys(per_target_cache_keys)
    self.valid = not cache_manager.needs_update(self.cache_key)
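
The pattern above (repeated in examples #5 through #8 below) combines the per-target cache keys into a single key for the whole target set and then asks the cache manager whether that combined key is still up to date. The following stand-alone sketch restates the idea with toy stand-ins; the CacheKey namedtuple, combine_cache_keys helper and SimpleInvalidator class are illustrative assumptions, not the pants CacheKeyGenerator or BuildInvalidator implementations.

# Toy sketch of the combine-then-check pattern; not the pants API.
import hashlib
from collections import namedtuple

CacheKey = namedtuple('CacheKey', ['id', 'hash'])

def combine_cache_keys(cache_keys):
    # One id and one hash for the whole set, mirroring CacheKeyGenerator.combine_cache_keys.
    combined_id = ','.join(key.id for key in cache_keys)
    combined_hash = hashlib.sha1(
        ''.join(key.hash for key in cache_keys).encode('utf-8')).hexdigest()
    return CacheKey(combined_id, combined_hash)

class SimpleInvalidator(object):
    # In-memory stand-in for the cache manager's needs_update() check.
    def __init__(self):
        self._built = {}

    def needs_update(self, cache_key):
        return self._built.get(cache_key.id) != cache_key.hash

    def update(self, cache_key):
        self._built[cache_key.id] = cache_key.hash

per_target_keys = [CacheKey('src/a:lib', 'deadbeef'), CacheKey('src/a:bin', 'cafebabe')]
combined = combine_cache_keys(per_target_keys)
invalidator = SimpleInvalidator()
valid = not invalidator.needs_update(combined)  # False until update() is called
invalidator.update(combined)
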
Code example #4
 def __init__(self, context):
   self.context = context
   self.dry_run = self.can_dry_run() and self.context.options.dry_run
   self._cache_key_generator = CacheKeyGenerator()
   self._artifact_cache = None
   self._build_invalidator_dir = os.path.join(context.config.get('tasks', 'build_invalidator'),
                                              self.product_type())
Code example #5
File: __init__.py Project: kevints/commons
 def combine_versioned_targets(vts):
   targets = []
   for vt in vts:
     targets.extend(vt.targets)
   cache_key = CacheKeyGenerator.combine_cache_keys([vt.cache_key for vt in vts])
   valid = all([vt.valid for vt in vts])
   return VersionedTargetSet(targets, cache_key, valid)
Code example #6
 def combine_versioned_targets(vts):
   targets = []
   for vt in vts:
     targets.extend(vt.targets)
   cache_key = CacheKeyGenerator.combine_cache_keys([vt.cache_key for vt in vts])
   valid = all([vt.valid for vt in vts])
   return VersionedTargetSet(targets, cache_key, valid)
Code example #7
    def __init__(self, cache_manager, targets, per_target_cache_keys):
        self._cache_manager = cache_manager
        self.per_target_cache_keys = per_target_cache_keys

        self.targets = targets
        self.cache_key = CacheKeyGenerator.combine_cache_keys(
            per_target_cache_keys)
        self.valid = not cache_manager.needs_update(self.cache_key)
Code example #8
File: cache_manager.py Project: ewhauser/commons
 def __init__(self, cache_manager, versioned_targets):
     self._cache_manager = cache_manager
     self.versioned_targets = versioned_targets
     self.targets = [vt.target for vt in versioned_targets]
     # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method.
     self.cache_key = CacheKeyGenerator.combine_cache_keys([vt.cache_key for vt in versioned_targets])
     self.num_sources = self.cache_key.num_sources
     self.valid = not cache_manager.needs_update(self.cache_key)
Code example #9
File: __init__.py Project: nsanch/commons
 def __init__(self, context):
     self.context = context
     self._cache_key_generator = CacheKeyGenerator()
     # TODO: Shared, remote build cache.
     self._artifact_cache = ArtifactCache(
         context.config.get('tasks', 'artifact_cache'))
     self._build_invalidator_dir = os.path.join(
         context.config.get('tasks', 'build_invalidator'),
         self.product_type())
Code example #10
 def __init__(self, cache_manager, versioned_targets):
     self._cache_manager = cache_manager
     self.versioned_targets = versioned_targets
     self.targets = [vt.target for vt in versioned_targets]
     # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method.
     self.cache_key = CacheKeyGenerator.combine_cache_keys(
         [vt.cache_key for vt in versioned_targets])
     self.num_sources = self.cache_key.num_sources
     self.valid = not cache_manager.needs_update(self.cache_key)
Code example #11
    def __init__(self, target, root_dir, extra_targets=None, builder=None):
        self._config = Config.load()

        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        artifact_cache_root = \
          os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
        self._artifact_cache = ArtifactCache(artifact_cache_root)
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = PythonResolver([self._target] + self._extra_targets)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())
        self._platforms = (Platform.current(), )
        self._pythons = (sys.version[:3], )

        # TODO(wickman) Should this be in the binary builder?
        if isinstance(self._target, PythonBinary):
            self._platforms = self._target._platforms
            self._pythons = self._target._interpreters
Code example #12
File: task.py Project: txidol/commons
 def __init__(self, context):
     self.context = context
     self.dry_run = self.can_dry_run() and context.options.dry_run
     self._cache_key_generator = CacheKeyGenerator(
         context.config.getdefault('cache_key_gen_version', default=None))
     self._artifact_cache_spec = None
     self._artifact_cache = None
     self._build_invalidator_dir = os.path.join(
         context.config.get('tasks', 'build_invalidator'),
         self.product_type())
     self._bootstrap_utils = BootstrapUtils(self.context.products)
Code example #13
File: python_chroot.py Project: dynamicguy/commons
  def __init__(self, target, root_dir, extra_targets=None, builder=None, conn_timeout=None):
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = MultiResolver.from_target(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(self._config.get('python-setup', 'artifact_cache'),
                                        '%s' % PythonIdentity.get())
Code example #14
File: python_chroot.py Project: BabyDuncan/commons
  def __init__(self, target, root_dir, extra_targets=None, builder=None, conn_timeout=None):
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = MultiResolver.from_target(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())

    artifact_cache_root = os.path.join(self._config.get('python-setup', 'artifact_cache'),
                                       '%s' % PythonIdentity.get())
    self._artifact_cache = FileBasedArtifactCache(None, self._root, artifact_cache_root,
                                                  self._builder.add_dependency_file)
Code example #15
File: python_chroot.py Project: wickman/commons
    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = MultiResolver.from_target(self._config,
                                                   target,
                                                   conn_timeout=conn_timeout)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())

        artifact_cache_root = os.path.join(
            self._config.get('python-setup', 'artifact_cache'),
            '%s' % PythonIdentity.get())
        self._artifact_cache = FileBasedArtifactCache(
            None, self._root, artifact_cache_root,
            self._builder.add_dependency_file)
Code example #16
File: task.py Project: testvidya11/commons
  def __init__(self, context):
    self.context = context
    self.dry_run = self.can_dry_run() and context.options.dry_run
    self._pants_workdir = self.context.config.getdefault('pants_workdir')
    self._cache_key_generator = CacheKeyGenerator(
        context.config.getdefault('cache_key_gen_version', default=None))
    self._read_artifact_cache_spec = None
    self._write_artifact_cache_spec = None
    self._artifact_cache = None
    self._artifact_cache_setup_lock = threading.Lock()

    default_invalidator_root = os.path.join(self.context.config.getdefault('pants_workdir'),
                                            'build_invalidator')
    self._build_invalidator_dir = os.path.join(
        context.config.get('tasks', 'build_invalidator', default=default_invalidator_root),
        self.product_type())
    self._jvm_tool_bootstrapper = JvmToolBootstrapper(self.context.products)
Code example #17
    def __init__(
        self, target, root_dir, extra_targets=None, builder=None, platforms=None, interpreter=None, conn_timeout=None
    ):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._platforms = platforms
        self._interpreter = interpreter or PythonInterpreter.get()
        self._extra_targets = list(extra_targets) if extra_targets is not None else []
        self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

        # Note: unrelated to the general pants artifact cache.
        self._egg_cache_root = os.path.join(
            PythonSetup(self._config).scratch_dir("artifact_cache", default_name="artifacts"),
            str(self._interpreter.identity),
        )

        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._egg_cache_root)
Code example #18
File: python_chroot.py Project: kevints/commons
  def __init__(self, target, root_dir, extra_targets=None, builder=None):
    self._config = Config.load()

    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    artifact_cache_root = \
      os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
    self._artifact_cache = ArtifactCache(artifact_cache_root)
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = PythonResolver([self._target] + self._extra_targets)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())
    self._platforms = (Platform.current(),)
    self._pythons = (sys.version[:3],)

    # TODO(wickman) Should this be in the binary builder?
    if isinstance(self._target, PythonBinary):
      self._platforms = self._target._platforms
      self._pythons = self._target._interpreters
Code example #19
File: python_chroot.py Project: wickman/commons
class PythonChroot(object):
    _VALID_DEPENDENCIES = {
        PythonLibrary: 'libraries',
        PythonRequirement: 'reqs',
        PythonBinary: 'binaries',
        PythonThriftLibrary: 'thrifts',
        PythonAntlrLibrary: 'antlrs',
        PythonTests: 'tests'
    }

    class InvalidDependencyException(Exception):
        def __init__(self, target):
            Exception.__init__(
                self, "Not a valid Python dependency! Found: %s" % target)

    class BuildFailureException(Exception):
        def __init__(self, msg):
            Exception.__init__(self, msg)

    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = MultiResolver.from_target(self._config,
                                                   target,
                                                   conn_timeout=conn_timeout)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())

        artifact_cache_root = os.path.join(
            self._config.get('python-setup', 'artifact_cache'),
            '%s' % PythonIdentity.get())
        self._artifact_cache = FileBasedArtifactCache(
            None, self._root, artifact_cache_root,
            self._builder.add_dependency_file)

    def __del__(self):
        if os.getenv('PANTS_LEAVE_CHROOT') is None:
            safe_rmtree(self.path())

    @property
    def builder(self):
        return self._builder

    def debug(self, msg, indent=0):
        if os.getenv('PANTS_VERBOSE') is not None:
            print('%s%s' % (' ' * indent, msg))

    def path(self):
        return self._builder.path()

    def _dump_library(self, library):
        def translate_module(module):
            if module is None:
                module = ''
            return module.replace('.', os.path.sep)

        def copy_to_chroot(base, path, relative_to, add_function):
            src = os.path.join(self._root, base, path)
            dst = os.path.join(translate_module(relative_to), path)
            add_function(src, dst)

        self.debug('  Dumping library: %s [relative module: %s]' %
                   (library, library.module))
        for filename in library.sources:
            copy_to_chroot(library.target_base, filename, library.module,
                           self._builder.add_source)
        for filename in library.resources:
            copy_to_chroot(library.target_base, filename, library.module,
                           self._builder.add_resource)

    def _dump_requirement(self, req, dynamic, repo):
        self.debug('  Dumping requirement: %s%s%s' %
                   (str(req), ' (dynamic)' if dynamic else '',
                    ' (repo: %s)' % repo if repo else ''))
        self._builder.add_requirement(req, dynamic, repo)

    def _dump_distribution(self, dist):
        self.debug('  Dumping distribution: .../%s' %
                   os.path.basename(dist.location))
        self._builder.add_distribution(dist)

    def _dump_bin(self, binary_name, base):
        src = os.path.join(self._root, base, binary_name)
        self.debug('  Dumping binary: %s' % binary_name)
        self._builder.set_executable(src, os.path.basename(src))

    def _dump_thrift_library(self, library):
        self._dump_built_library(
            library, PythonThriftBuilder(library, self._root, self._config))

    def _dump_antlr_library(self, library):
        self._dump_built_library(library,
                                 PythonAntlrBuilder(library, self._root))

    def _dump_built_library(self, library, builder):
        # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
        absolute_sources = library.expand_files()
        absolute_sources.sort()
        cache_key = self._key_generator.key_for(library.id, absolute_sources)
        if self._artifact_cache.has(cache_key):
            self.debug('  Generating (cached) %s...' % library)
            self._artifact_cache.use_cached_files(cache_key)
        else:
            self.debug('  Generating %s...' % library)
            egg_file = builder.build_egg()
            if not egg_file:
                raise PythonChroot.BuildFailureException(
                    "Failed to build %s!" % library)
            src_egg_file = egg_file
            dst_egg_file = os.path.join(
                os.path.dirname(egg_file),
                cache_key.hash + '_' + os.path.basename(egg_file))
            self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
            os.rename(src_egg_file, dst_egg_file)
            self._artifact_cache.insert(cache_key, [dst_egg_file])
            self._builder.add_egg(dst_egg_file)

    def resolve(self, targets):
        children = defaultdict(OrderedSet)

        def add_dep(trg):
            if is_concrete(trg):
                for target_type, target_key in self._VALID_DEPENDENCIES.items():
                    if isinstance(trg, target_type):
                        children[target_key].add(trg)
                        return
            raise self.InvalidDependencyException(trg)

        for target in targets:
            target.walk(add_dep)
        return children

    def dump(self):
        self.debug('Building PythonBinary %s:' % self._target)

        targets = self.resolve([self._target] + self._extra_targets)

        for lib in targets['libraries']:
            self._dump_library(lib)

        for req in targets['reqs']:
            if not req.should_build():
                self.debug('Skipping %s based upon version filter' % req)
                continue
            self._dump_requirement(req._requirement, req._dynamic,
                                   req._repository)

        for dist in self._resolver.resolve(req._requirement
                                           for req in targets['reqs']
                                           if req.should_build()):
            self._dump_distribution(dist)

        if targets['thrifts']:
            default_thrift_version = self._config.get('python-thrift',
                                                      'default-version',
                                                      default='0.9')
            thrift_versions = set()
            for thr in targets['thrifts']:
                self._dump_thrift_library(thr)
                thrift_version = thr.thrift_version or default_thrift_version
                thrift_versions.add(thrift_version)
            if len(thrift_versions) > 1:
                print('WARNING: Target has multiple thrift versions!')
            for version in thrift_versions:
                self._builder.add_requirement('thrift==%s' % version)
                for dist in self._resolver.resolve(
                        'thrift==%s' % version for version in thrift_versions):
                    self._dump_distribution(dist)

        for antlr in targets['antlrs']:
            self._dump_antlr_library(antlr)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        for binary in targets['binaries']:
            if len(binary.sources) > 0:
                self._dump_bin(binary.sources[0], binary.target_base)

        return self._builder
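
The _dump_built_library method in the example above is the core of the per-library egg caching: derive a cache key from the sorted source files, reuse the cached egg on a hit, otherwise build the egg, rename it under the key's hash, and insert it into the cache. A condensed sketch of that control flow follows; build_or_reuse and its parameters are hypothetical stand-ins, not actual pants classes.

# Condensed sketch of the build-or-reuse flow in _dump_built_library above.
# key_generator, artifact_cache and build_egg are assumed to follow the
# interfaces shown in the example; they are placeholders, not pants objects.
import os

def build_or_reuse(library_id, sources, key_generator, artifact_cache, build_egg):
    cache_key = key_generator.key_for(library_id, sorted(sources))
    if artifact_cache.has(cache_key):
        # Cache hit: reuse the egg(s) built for exactly these sources.
        artifact_cache.use_cached_files(cache_key)
        return
    egg_file = build_egg()  # the expensive codegen/build step
    if not egg_file:
        raise RuntimeError('Failed to build %s!' % library_id)
    # Prefix the artifact with the key's hash so distinct inputs never collide.
    dst_egg_file = os.path.join(os.path.dirname(egg_file),
                                cache_key.hash + '_' + os.path.basename(egg_file))
    os.rename(egg_file, dst_egg_file)
    artifact_cache.insert(cache_key, [dst_egg_file])
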
Code example #20
File: python_chroot.py Project: CodeWarltz/commons
class PythonChroot(object):
  _VALID_DEPENDENCIES = {
    PythonLibrary: 'libraries',
    PythonRequirement: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests'
  }

  MEMOIZED_THRIFTS = {}

  class InvalidDependencyException(Exception):
    def __init__(self, target):
      Exception.__init__(self, "Not a valid Python dependency! Found: %s" % target)

  def __init__(self, target, root_dir, extra_targets=None, builder=None, interpreter=None,
      conn_timeout=None):
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._interpreter = interpreter or PythonInterpreter.get()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = MultiResolver(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp(), interpreter=self._interpreter)

    # Note: unrelated to the general pants artifact cache.
    self._egg_cache_root = os.path.join(self._config.get('python-setup', 'artifact_cache'),
                                        str(self._interpreter.identity))

    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._egg_cache_root)


  def __del__(self):
    if os.getenv('PANTS_LEAVE_CHROOT') is None:
      safe_rmtree(self.path())
    else:
      self.debug('Left chroot at %s' % self.path())

  @property
  def builder(self):
    return self._builder

  def debug(self, msg, indent=0):
    if os.getenv('PANTS_VERBOSE') is not None:
      print('%s%s' % (' ' * indent, msg))

  def path(self):
    return self._builder.path()

  def _dump_library(self, library):
    def copy_to_chroot(base, path, add_function):
      src = os.path.join(self._root, base, path)
      add_function(src, path)

    self.debug('  Dumping library: %s' % library)
    for filename in library.sources:
      copy_to_chroot(library.target_base, filename, self._builder.add_source)
    for filename in library.resources:
      copy_to_chroot(library.target_base, filename, self._builder.add_resource)

  def _dump_requirement(self, req, dynamic, repo):
    self.debug('  Dumping requirement: %s%s%s' % (str(req),
      ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
    self._builder.add_requirement(req, dynamic, repo)

  def _dump_distribution(self, dist):
    self.debug('  Dumping distribution: .../%s' % os.path.basename(dist.location))
    self._builder.add_distribution(dist)

  def _generate_requirement(self, library, builder_cls):
    library_key = self._key_generator.key_for_target(library)
    builder = builder_cls(library, self._root, self._config, '-' + library_key.hash[:8])

    cache_dir = os.path.join(self._egg_cache_root, library_key.id)
    if self._build_invalidator.needs_update(library_key):
      sdist = builder.build(interpreter=self._interpreter)
      safe_mkdir(cache_dir)
      shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
      self._build_invalidator.update(library_key)

    with ParseContext.temp():
      return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)

  def _generate_thrift_requirement(self, library):
    return self._generate_requirement(library, PythonThriftBuilder)

  def _generate_antlr_requirement(self, library):
    return self._generate_requirement(library, PythonAntlrBuilder)

  def resolve(self, targets):
    children = defaultdict(OrderedSet)
    def add_dep(trg):
      if trg.is_concrete:
        for target_type, target_key in self._VALID_DEPENDENCIES.items():
          if isinstance(trg, target_type):
            children[target_key].add(trg)
            return
      raise self.InvalidDependencyException(trg)
    for target in targets:
      target.walk(add_dep)
    return children

  def dump(self):
    self.debug('Building PythonBinary %s:' % self._target)

    targets = self.resolve([self._target] + self._extra_targets)

    for lib in targets['libraries'] | targets['binaries']:
      self._dump_library(lib)

    generated_reqs = OrderedSet()
    if targets['thrifts']:
      for thr in set(targets['thrifts']):
        if thr not in self.MEMOIZED_THRIFTS:
          self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
        generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
      with ParseContext.temp():
        # trick pants into letting us add this python requirement, otherwise we get
        # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
        # PythonRequirement(thrift)
        #
        # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
        # should first detect if any explicit thrift requirements have been added and use
        # those.  Only if they have not been supplied should we auto-inject it.
        generated_reqs.add(PythonRequirement('thrift', use_2to3=True,
            name='thrift-' + ''.join(random.sample('0123456789abcdef' * 8, 8))))

    for antlr in targets['antlrs']:
      generated_reqs.add(self._generate_antlr_requirement(antlr))

    targets['reqs'] |= generated_reqs
    for req in targets['reqs']:
      if not req.should_build(self._interpreter.python, Platform.current()):
        self.debug('Skipping %s based upon version filter' % req)
        continue
      self._dump_requirement(req._requirement, False, req._repository)

    reqs_to_build = (req for req in targets['reqs']
        if req.should_build(self._interpreter.python, Platform.current()))
    for dist in self._resolver.resolve(reqs_to_build, interpreter=self._interpreter):
      self._dump_distribution(dist)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    return self._builder
Code example #21
File: python_chroot.py Project: BabyDuncan/commons
class PythonChroot(object):
  _VALID_DEPENDENCIES = {
    PythonLibrary: 'libraries',
    PythonRequirement: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests'
  }

  class InvalidDependencyException(Exception):
    def __init__(self, target):
      Exception.__init__(self, "Not a valid Python dependency! Found: %s" % target)

  class BuildFailureException(Exception):
    def __init__(self, msg):
      Exception.__init__(self, msg)

  def __init__(self, target, root_dir, extra_targets=None, builder=None, conn_timeout=None):
    self._config = Config.load()
    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = MultiResolver.from_target(self._config, target, conn_timeout=conn_timeout)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())

    artifact_cache_root = os.path.join(self._config.get('python-setup', 'artifact_cache'),
                                       '%s' % PythonIdentity.get())
    self._artifact_cache = FileBasedArtifactCache(None, self._root, artifact_cache_root,
                                                  self._builder.add_dependency_file)

  def __del__(self):
    if os.getenv('PANTS_LEAVE_CHROOT') is None:
      safe_rmtree(self.path())

  @property
  def builder(self):
    return self._builder

  def debug(self, msg, indent=0):
    if os.getenv('PANTS_VERBOSE') is not None:
      print('%s%s' % (' ' * indent, msg))

  def path(self):
    return self._builder.path()

  def _dump_library(self, library):
    def translate_module(module):
      if module is None:
        module = ''
      return module.replace('.', os.path.sep)

    def copy_to_chroot(base, path, relative_to, add_function):
      src = os.path.join(self._root, base, path)
      dst = os.path.join(translate_module(relative_to), path)
      add_function(src, dst)

    self.debug('  Dumping library: %s [relative module: %s]' % (library, library.module))
    for filename in library.sources:
      copy_to_chroot(library.target_base, filename, library.module, self._builder.add_source)
    for filename in library.resources:
      copy_to_chroot(library.target_base, filename, library.module, self._builder.add_resource)

  def _dump_requirement(self, req, dynamic, repo):
    self.debug('  Dumping requirement: %s%s%s' % (str(req),
      ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
    self._builder.add_requirement(req, dynamic, repo)

  def _dump_distribution(self, dist):
    self.debug('  Dumping distribution: .../%s' % os.path.basename(dist.location))
    self._builder.add_distribution(dist)

  def _dump_bin(self, binary_name, base):
    src = os.path.join(self._root, base, binary_name)
    self.debug('  Dumping binary: %s' % binary_name)
    self._builder.set_executable(src, os.path.basename(src))

  def _dump_thrift_library(self, library):
    self._dump_built_library(library, PythonThriftBuilder(library, self._root, self._config))

  def _dump_antlr_library(self, library):
    self._dump_built_library(library, PythonAntlrBuilder(library, self._root))

  def _dump_built_library(self, library, builder):
    # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
    absolute_sources = library.expand_files()
    absolute_sources.sort()
    cache_key = self._key_generator.key_for(library.id, absolute_sources)
    if self._artifact_cache.has(cache_key):
      self.debug('  Generating (cached) %s...' % library)
      self._artifact_cache.use_cached_files(cache_key)
    else:
      self.debug('  Generating %s...' % library)
      egg_file = builder.build_egg()
      if not egg_file:
        raise PythonChroot.BuildFailureException("Failed to build %s!" % library)
      src_egg_file = egg_file
      dst_egg_file = os.path.join(os.path.dirname(egg_file),
          cache_key.hash + '_' + os.path.basename(egg_file))
      self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
      os.rename(src_egg_file, dst_egg_file)
      self._artifact_cache.insert(cache_key, [dst_egg_file])
      self._builder.add_egg(dst_egg_file)

  def resolve(self, targets):
    children = defaultdict(OrderedSet)
    def add_dep(trg):
      if is_concrete(trg):
        for target_type, target_key in self._VALID_DEPENDENCIES.items():
          if isinstance(trg, target_type):
            children[target_key].add(trg)
            return
      raise self.InvalidDependencyException(trg)
    for target in targets:
      target.walk(add_dep)
    return children

  def dump(self):
    self.debug('Building PythonBinary %s:' % self._target)

    targets = self.resolve([self._target] + self._extra_targets)

    for lib in targets['libraries']:
      self._dump_library(lib)

    for req in targets['reqs']:
      if not req.should_build():
        self.debug('Skipping %s based upon version filter' % req)
        continue
      self._dump_requirement(req._requirement, req._dynamic, req._repository)

    for dist in self._resolver.resolve(
        req._requirement for req in targets['reqs'] if req.should_build()):
      self._dump_distribution(dist)

    if targets['thrifts']:
      default_thrift_version = self._config.get('python-thrift', 'default-version', default='0.9')
      thrift_versions = set()
      for thr in targets['thrifts']:
        self._dump_thrift_library(thr)
        thrift_version = thr.thrift_version or default_thrift_version
        thrift_versions.add(thrift_version)
      if len(thrift_versions) > 1:
        print('WARNING: Target has multiple thrift versions!')
      for version in thrift_versions:
        self._builder.add_requirement('thrift==%s' % version)
        for dist in self._resolver.resolve('thrift==%s' % version for version in thrift_versions):
          self._dump_distribution(dist)

    for antlr in targets['antlrs']:
      self._dump_antlr_library(antlr)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    for binary in targets['binaries']:
      if len(binary.sources) > 0:
        self._dump_bin(binary.sources[0], binary.target_base)

    return self._builder
Code example #22
class PythonChroot(object):
    _VALID_DEPENDENCIES = {
        PythonLibrary: 'libraries',
        PythonRequirement: 'reqs',
        PythonBinary: 'binaries',
        PythonThriftLibrary: 'thrifts',
        PythonAntlrLibrary: 'antlrs',
        PythonTests: 'tests'
    }

    MEMOIZED_THRIFTS = {}

    class InvalidDependencyException(Exception):
        def __init__(self, target):
            Exception.__init__(
                self, "Not a valid Python dependency! Found: %s" % target)

    def __init__(self,
                 target,
                 root_dir,
                 extra_targets=None,
                 builder=None,
                 interpreter=None,
                 conn_timeout=None):
        self._config = Config.load()
        self._target = target
        self._root = root_dir
        self._interpreter = interpreter or PythonInterpreter.get()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = MultiResolver(self._config,
                                       target,
                                       conn_timeout=conn_timeout)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp(),
                                              interpreter=self._interpreter)

        # Note: unrelated to the general pants artifact cache.
        self._egg_cache_root = os.path.join(
            self._config.get('python-setup', 'artifact_cache'),
            str(self._interpreter.identity))

        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._egg_cache_root)

    def __del__(self):
        if os.getenv('PANTS_LEAVE_CHROOT') is None:
            safe_rmtree(self.path())
        else:
            self.debug('Left chroot at %s' % self.path())

    @property
    def builder(self):
        return self._builder

    def debug(self, msg, indent=0):
        if os.getenv('PANTS_VERBOSE') is not None:
            print('%s%s' % (' ' * indent, msg))

    def path(self):
        return self._builder.path()

    def _dump_library(self, library):
        def copy_to_chroot(base, path, add_function):
            src = os.path.join(self._root, base, path)
            add_function(src, path)

        self.debug('  Dumping library: %s' % library)
        for filename in library.sources:
            copy_to_chroot(library.target_base, filename,
                           self._builder.add_source)
        for filename in library.resources:
            copy_to_chroot(library.target_base, filename,
                           self._builder.add_resource)

    def _dump_requirement(self, req, dynamic, repo):
        self.debug('  Dumping requirement: %s%s%s' %
                   (str(req), ' (dynamic)' if dynamic else '',
                    ' (repo: %s)' % repo if repo else ''))
        self._builder.add_requirement(req, dynamic, repo)

    def _dump_distribution(self, dist):
        self.debug('  Dumping distribution: .../%s' %
                   os.path.basename(dist.location))
        self._builder.add_distribution(dist)

    def _generate_requirement(self, library, builder_cls):
        library_key = self._key_generator.key_for_target(library)
        builder = builder_cls(library, self._root, self._config,
                              '-' + library_key.hash[:8])

        cache_dir = os.path.join(self._egg_cache_root, library_key.id)
        if self._build_invalidator.needs_update(library_key):
            sdist = builder.build(interpreter=self._interpreter)
            safe_mkdir(cache_dir)
            shutil.copy(sdist, os.path.join(cache_dir,
                                            os.path.basename(sdist)))
            self._build_invalidator.update(library_key)

        with ParseContext.temp():
            return PythonRequirement(builder.requirement_string(),
                                     repository=cache_dir,
                                     use_2to3=True)

    def _generate_thrift_requirement(self, library):
        return self._generate_requirement(library, PythonThriftBuilder)

    def _generate_antlr_requirement(self, library):
        return self._generate_requirement(library, PythonAntlrBuilder)

    def resolve(self, targets):
        children = defaultdict(OrderedSet)

        def add_dep(trg):
            if trg.is_concrete:
                for target_type, target_key in self._VALID_DEPENDENCIES.items():
                    if isinstance(trg, target_type):
                        children[target_key].add(trg)
                        return
            raise self.InvalidDependencyException(trg)

        for target in targets:
            target.walk(add_dep)
        return children

    def dump(self):
        self.debug('Building PythonBinary %s:' % self._target)

        targets = self.resolve([self._target] + self._extra_targets)

        for lib in targets['libraries'] | targets['binaries']:
            self._dump_library(lib)

        generated_reqs = OrderedSet()
        if targets['thrifts']:
            for thr in set(targets['thrifts']):
                if thr not in self.MEMOIZED_THRIFTS:
                    self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
                generated_reqs.add(self.MEMOIZED_THRIFTS[thr])
            with ParseContext.temp():
                # trick pants into letting us add this python requirement, otherwise we get
                # TargetDefinitionException: Error in target BUILD.temp:thrift: duplicate to
                # PythonRequirement(thrift)
                #
                # TODO(wickman) Instead of just blindly adding a PythonRequirement for thrift, we
                # should first detect if any explicit thrift requirements have been added and use
                # those.  Only if they have not been supplied should we auto-inject it.
                generated_reqs.add(
                    PythonRequirement(
                        'thrift',
                        use_2to3=True,
                        name='thrift-' +
                        ''.join(random.sample('0123456789abcdef' * 8, 8))))

        for antlr in targets['antlrs']:
            generated_reqs.add(self._generate_antlr_requirement(antlr))

        targets['reqs'] |= generated_reqs
        for req in targets['reqs']:
            if not req.should_build(self._interpreter.python,
                                    Platform.current()):
                self.debug('Skipping %s based upon version filter' % req)
                continue
            self._dump_requirement(req._requirement, False, req._repository)

        reqs_to_build = (
            req for req in targets['reqs']
            if req.should_build(self._interpreter.python, Platform.current()))
        for dist in self._resolver.resolve(reqs_to_build,
                                           interpreter=self._interpreter):
            self._dump_distribution(dist)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        return self._builder
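
Examples #20 and #22 replace the ArtifactCache-based egg caching with a BuildInvalidator keyed per library: key_for_target produces the library's cache key, needs_update decides whether the sdist must be rebuilt into the key-specific cache directory, and update records the successful build so later runs can skip it. Below is a condensed, hedged sketch of that flow; generate_requirement_sdist and build_sdist are illustrative names, not pants APIs.

# Sketch of the _generate_requirement flow from examples #20 and #22.
# key_generator and invalidator are assumed to expose key_for_target(),
# needs_update() and update() as shown above; build_sdist stands in for
# builder_cls(...).build(interpreter=...).
import os
import shutil

def generate_requirement_sdist(library, egg_cache_root, key_generator, invalidator, build_sdist):
    library_key = key_generator.key_for_target(library)
    cache_dir = os.path.join(egg_cache_root, library_key.id)
    if invalidator.needs_update(library_key):  # sources changed since the last build
        sdist = build_sdist(library, '-' + library_key.hash[:8])
        os.makedirs(cache_dir, exist_ok=True)  # stand-in for safe_mkdir
        shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
        invalidator.update(library_key)  # remember that this key has been built
    return cache_dir  # later handed to PythonRequirement as a local repository
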
Code example #23
File: python_chroot.py Project: ugodiggi/commons
class PythonChroot(object):
    class BuildFailureException(Exception):
        def __init__(self, msg):
            Exception.__init__(self, msg)

    def __init__(self, target, root_dir, extra_targets=None, builder=None):
        self._config = Config.load()

        self._target = target
        self._root = root_dir
        self._key_generator = CacheKeyGenerator()
        self._extra_targets = list(
            extra_targets) if extra_targets is not None else []
        self._resolver = PythonResolver([self._target] + self._extra_targets)
        self._builder = builder or PEXBuilder(tempfile.mkdtemp())
        self._platforms = (Platform.current(), )
        self._pythons = (sys.version[:3], )

        artifact_cache_root = \
          os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
        self._artifact_cache = FileBasedArtifactCache(
            None, self._root, artifact_cache_root,
            self._builder.add_dependency_file)

        # TODO(wickman) Should this be in the binary builder?
        if isinstance(self._target, PythonBinary):
            self._platforms = self._target._platforms
            self._pythons = self._target._interpreters

    def __del__(self):
        if os.getenv('PANTS_LEAVE_CHROOT') is None:
            safe_rmtree(self.path())

    def debug(self, msg, indent=0):
        if os.getenv('PANTS_VERBOSE') is not None:
            print('%s%s' % (' ' * indent, msg))

    def path(self):
        return self._builder.path()

    def _dump_library(self, library):
        def translate_module(module):
            if module is None:
                module = ''
            return module.replace('.', os.path.sep)

        def copy_to_chroot(base, path, relative_to, add_function):
            src = os.path.join(self._root, base, path)
            dst = os.path.join(translate_module(relative_to), path)
            add_function(src, dst)

        self.debug('  Dumping library: %s [relative module: %s]' %
                   (library, library.module))
        for filename in library.sources:
            copy_to_chroot(library.target_base, filename, library.module,
                           self._builder.add_source)
        for filename in library.resources:
            copy_to_chroot(library.target_base, filename, library.module,
                           self._builder.add_resource)

    def _dump_requirement(self, req, dynamic, repo):
        self.debug('  Dumping requirement: %s%s%s' %
                   (str(req), ' (dynamic)' if dynamic else '',
                    ' (repo: %s)' % repo if repo else ''))
        self._builder.add_requirement(req, dynamic, repo)

    def _dump_distribution(self, dist):
        self.debug('  Dumping distribution: .../%s' %
                   os.path.basename(dist.location))
        self._builder.add_distribution(dist)

    def _dump_bin(self, binary_name, base):
        src = os.path.join(self._root, base, binary_name)
        self.debug('  Dumping binary: %s' % binary_name)
        self._builder.set_executable(src, os.path.basename(src))

    def _dump_thrift_library(self, library):
        self._dump_built_library(library,
                                 PythonThriftBuilder(library, self._root))

    def _dump_antlr_library(self, library):
        self._dump_built_library(library,
                                 PythonAntlrBuilder(library, self._root))

    def _dump_built_library(self, library, builder):
        # TODO(wickman): Port this over to the Installer+Distiller and stop using FileBasedArtifactCache.
        absolute_sources = library.expand_files()
        absolute_sources.sort()
        cache_key = self._key_generator.key_for(library.id, absolute_sources)
        if self._artifact_cache.has(cache_key):
            self.debug('  Generating (cached) %s...' % library)
            self._artifact_cache.use_cached_files(cache_key)
        else:
            self.debug('  Generating %s...' % library)
            egg_file = builder.build_egg()
            if not egg_file:
                raise PythonChroot.BuildFailureException(
                    "Failed to build %s!" % library)
            src_egg_file = egg_file
            dst_egg_file = os.path.join(
                os.path.dirname(egg_file),
                cache_key.hash + '_' + os.path.basename(egg_file))
            self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
            os.rename(src_egg_file, dst_egg_file)
            self._artifact_cache.insert(cache_key, [dst_egg_file])
            self._builder.add_egg(dst_egg_file)

    def dump(self):
        self.debug('Building PythonBinary %s:' % self._target)

        targets = self._resolver.resolve()

        for lib in targets['libraries']:
            self._dump_library(lib)

        for req in targets['reqs']:
            if not req.should_build():
                self.debug('Skipping %s based upon version filter' % req)
                continue
            self._dump_requirement(req._requirement, req._dynamic,
                                   req._repository)

        for dist in ReqResolver.resolve(
            (req._requirement
             for req in targets['reqs'] if req.should_build()),
                self._config,
                self._platforms,
                self._pythons,
                ignore_errors=self._builder.info().ignore_errors):
            self._dump_distribution(dist)

        for thr in targets['thrifts']:
            self._dump_thrift_library(thr)

        for antlr in targets['antlrs']:
            self._dump_antlr_library(antlr)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        for binary in targets['binaries']:
            if len(binary.sources) > 0:
                self._dump_bin(binary.sources[0], binary.target_base)

        return self._builder
Code example #24
def test_env(content=TEST_CONTENT):
    with temporary_dir() as d:
        with tempfile.NamedTemporaryFile() as f:
            f.write(content)
            f.flush()
            yield f, CacheKeyGenerator(), BuildInvalidator(d)
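
Example #24 is a test helper: it writes TEST_CONTENT into a temporary file and yields that file together with a fresh CacheKeyGenerator and a BuildInvalidator rooted in a temporary directory. Assuming the function is wrapped with contextlib.contextmanager (the bare yield suggests the decorator was dropped from the excerpt), a test might use it roughly as sketched below; test_invalidation_round_trip and the 'test-target' id are illustrative, while key_for, needs_update and update follow the usage seen in the other examples.

# Hypothetical usage of the test_env fixture above.
def test_invalidation_round_trip():
    with test_env() as (tmp_file, key_generator, invalidator):
        key = key_generator.key_for('test-target', [tmp_file.name])
        assert invalidator.needs_update(key)      # nothing recorded for this key yet
        invalidator.update(key)                   # mark the key as built
        assert not invalidator.needs_update(key)  # now it is considered up to date
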
Code example #25
File: python_chroot.py Project: kevints/commons
class PythonChroot(object):
  class BuildFailureException(Exception):
    def __init__(self, msg):
      Exception.__init__(self, msg)

  def __init__(self, target, root_dir, extra_targets=None, builder=None):
    self._config = Config.load()

    self._target = target
    self._root = root_dir
    self._key_generator = CacheKeyGenerator()
    artifact_cache_root = \
      os.path.join(self._config.get('python-setup', 'artifact_cache'), '%s' % PythonIdentity.get())
    self._artifact_cache = ArtifactCache(artifact_cache_root)
    self._extra_targets = list(extra_targets) if extra_targets is not None else []
    self._resolver = PythonResolver([self._target] + self._extra_targets)
    self._builder = builder or PEXBuilder(tempfile.mkdtemp())
    self._platforms = (Platform.current(),)
    self._pythons = (sys.version[:3],)

    # TODO(wickman) Should this be in the binary builder?
    if isinstance(self._target, PythonBinary):
      self._platforms = self._target._platforms
      self._pythons = self._target._interpreters

  def __del__(self):
    if os.getenv('PANTS_LEAVE_CHROOT') is None:
      safe_rmtree(self.path())

  def debug(self, msg, indent=0):
    if os.getenv('PANTS_VERBOSE') is not None:
      print('%s%s' % (' ' * indent, msg))

  def path(self):
    return self._builder.path()

  def _dump_library(self, library):
    def translate_module(module):
      if module is None:
        module = ''
      return module.replace('.', os.path.sep)

    def copy_to_chroot(base, path, relative_to, add_function):
      src = os.path.join(self._root, base, path)
      dst = os.path.join(translate_module(relative_to), path)
      add_function(src, dst)

    self.debug('  Dumping library: %s [relative module: %s]' % (library, library.module))
    for filename in library.sources:
      copy_to_chroot(library.target_base, filename, library.module, self._builder.add_source)
    for filename in library.resources:
      copy_to_chroot(library.target_base, filename, library.module, self._builder.add_resource)

  def _dump_requirement(self, req, dynamic, repo):
    self.debug('  Dumping requirement: %s%s%s' % (str(req),
      ' (dynamic)' if dynamic else '', ' (repo: %s)' % repo if repo else ''))
    self._builder.add_requirement(req, dynamic, repo)

  def _dump_distribution(self, dist):
    self.debug('  Dumping distribution: .../%s' % os.path.basename(dist.location))
    self._builder.add_distribution(dist)

  def _dump_bin(self, binary_name, base):
    src = os.path.join(self._root, base, binary_name)
    self.debug('  Dumping binary: %s' % binary_name)
    self._builder.set_executable(src, os.path.basename(src))

  def _dump_thrift_library(self, library):
    self._dump_built_library(library, PythonThriftBuilder(library, self._root))

  def _dump_antlr_library(self, library):
    self._dump_built_library(library, PythonAntlrBuilder(library, self._root))

  def _dump_built_library(self, library, builder):
    # TODO(wickman): Port this over to the Installer+Distiller and stop using ArtifactCache.
    absolute_sources = library.expand_files()
    absolute_sources.sort()
    cache_key = self._key_generator.key_for(library.id, absolute_sources)
    if self._artifact_cache.has(cache_key):
      self.debug('  Generating (cached) %s...' % library)
      self._artifact_cache.use_cached_files(cache_key, self._builder.add_dependency_file)
    else:
      self.debug('  Generating %s...' % library)
      egg_file = builder.build_egg()
      if not egg_file:
        raise PythonChroot.BuildFailureException("Failed to build %s!" % library)
      src_egg_file = egg_file
      dst_egg_file = os.path.join(os.path.dirname(egg_file),
          cache_key.hash + '_' + os.path.basename(egg_file))
      self.debug('       %s => %s' % (src_egg_file, dst_egg_file))
      os.rename(src_egg_file, dst_egg_file)
      self._artifact_cache.insert(cache_key, [dst_egg_file])
      self._builder.add_egg(dst_egg_file)

  def dump(self):
    self.debug('Building PythonBinary %s:' % self._target)

    targets = self._resolver.resolve()

    for lib in targets['libraries']:
      self._dump_library(lib)

    for req in targets['reqs']:
      if not req.should_build():
        self.debug('Skipping %s based upon version filter' % req)
        continue
      self._dump_requirement(req._requirement, req._dynamic, req._repository)

    for dist in ReqResolver.resolve(
        (req._requirement for req in targets['reqs'] if req.should_build()),
        self._config, self._platforms, self._pythons,
        ignore_errors=self._builder.info().ignore_errors):
      self._dump_distribution(dist)

    for thr in targets['thrifts']:
      self._dump_thrift_library(thr)

    for antlr in targets['antlrs']:
      self._dump_antlr_library(antlr)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    for binary in targets['binaries']:
      if len(binary.sources) > 0:
        self._dump_bin(binary.sources[0], binary.target_base)

    return self._builder