Example #1
    def Prepare(self, components, version=None):
        """Ensures the components of an SDK exist and are read-locked.

        For a given SDK version, pulls down missing components, and provides a
        context where the components are read-locked, which prevents the cache
        from deleting them during its purge operations.

        Args:
          components: A list of specific components (tarballs) to prepare.
          version: The version to prepare.  If not set, uses the version
            returned by GetDefaultVersion().

        Yields:
          An SDKFetcher.SDKContext namedtuple object.  The attributes of the
          object are:
            version: The version that was prepared.
            key_map: Dictionary that contains CacheReference objects for the
              SDK artifacts, indexed by cache key.
        """
        if version is None:
            version = self.GetDefaultVersion()
        components = list(components)

        key_map = {}
        fetch_urls = {}
        version_base = os.path.join(self.gs_base, version)

        # Fetch toolchains from separate location.
        if self.TARGET_TOOLCHAIN_KEY in components:
            metadata = self._GetMetadata(version)
            tc_tuple = metadata['toolchain-tuple'][0]
            fetch_urls[self.TARGET_TOOLCHAIN_KEY] = os.path.join(
                'gs://', constants.SDK_GS_BUCKET,
                metadata['toolchain-url'] % {'target': tc_tuple})
            components.remove(self.TARGET_TOOLCHAIN_KEY)

        fetch_urls.update(
            (t, os.path.join(version_base, t)) for t in components)
        try:
            for key, url in fetch_urls.iteritems():
                cache_key = (self.board, version, key)
                ref = self.tarball_cache.Lookup(cache_key)
                key_map[key] = ref
                ref.Acquire()
                if not ref.Exists(lock=True):
                    # TODO(rcui): Parallelize this.  Requires acquiring locks *before*
                    # generating worker processes; therefore the functionality needs to
                    # be moved into the DiskCache class itself -
                    # i.e., DiskCache.ParallelSetDefault().
                    self._UpdateTarball(url, ref)

            yield self.SDKContext(version, key_map)
        finally:
            # TODO(rcui): Move to using cros_build_lib.ContextManagerStack()
            cros_build_lib.SafeRun(
                [ref.Release for ref in key_map.itervalues()])
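The Prepare() generator above follows an acquire/yield/release shape: every CacheReference is acquired up front, the context object is yielded, and the finally block releases all references even if the caller's block raises. The snippet below is a self-contained distillation of that shape for illustration only; it is not chromite code, and the names (acquire_all, FakeRef) are made up.

import contextlib

class FakeRef(object):
    """Stand-in for a CacheReference-like object (illustrative only)."""

    def __init__(self, name):
        self.name = name

    def Acquire(self):
        print('acquired %s' % self.name)

    def Release(self):
        print('released %s' % self.name)

@contextlib.contextmanager
def acquire_all(names):
    """Acquire one reference per name, yield the map, release all on exit."""
    key_map = {}
    try:
        for name in names:
            ref = FakeRef(name)
            key_map[name] = ref
            ref.Acquire()
        yield key_map
    finally:
        # Release every reference that was acquired, even if the body raised.
        for ref in key_map.values():
            ref.Release()

with acquire_all(['sysroot', 'chrome']) as refs:
    print('holding %d references' % len(refs))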
Example #2
    def stop(self):
        """Restores namespace to the unmocked state."""
        # pylint: disable=W0212
        try:
            if self.__saved_env__ is not None:
                osutils.SetEnvironment(self.__saved_env__)

            if self.started:
                cros_build_lib.SafeRun([self.PreStop, self._stop])
        finally:
            self.started = False
            if getattr(self, 'tempdir', None):
                osutils._TempDirTearDown(self, False)
Example #3
  def stop(self):
    """Restores namespace to the unmocked state."""
    try:
      if self.__saved_env__ is not None:
        osutils.SetEnvironment(self.__saved_env__)

      tasks = ([self.PreStop] + [p.stop for p in self.patchers.itervalues()] +
               [p.stop for p in self.external_patchers])
      if self._tempdir_obj is not None:
        tasks += [self._tempdir_obj.Cleanup]
      cros_build_lib.SafeRun(tasks)
    finally:
      self.started = False
      self.tempdir, self._tempdir_obj = None, None
Example #4
    def Prepare(self,
                components,
                version=None,
                target_tc=None,
                toolchain_url=None):
        """Ensures the components of an SDK exist and are read-locked.

    For a given SDK version, pulls down missing components, and provides a
    context where the components are read-locked, which prevents the cache from
    deleting them during its purge operations.

    If both target_tc and toolchain_url arguments are provided, then this
    does not download metadata.json for the given version. Otherwise, this
    function requires metadata.json for the given version to exist.

    Args:
      gs_ctx: GSContext object.
      components: A list of specific components(tarballs) to prepare.
      version: The version to prepare.  If not set, uses the version returned by
        GetDefaultVersion().  If there is no default version set (this is the
        first time we are being executed), then we update the default version.
      target_tc: Target toolchain name to use, e.g. x86_64-cros-linux-gnu
      toolchain_url: Format pattern for path to fetch toolchain from,
        e.g. 2014/04/%(target)s-2014.04.23.220740.tar.xz

    Yields:
      An SDKFetcher.SDKContext namedtuple object.  The attributes of the
      object are:
        version: The version that was prepared.
        target_tc: Target toolchain name.
        key_map: Dictionary that contains CacheReference objects for the SDK
          artifacts, indexed by cache key.
    """
        if version is None and self.sdk_path is None:
            version = self.GetDefaultVersion()
            if version is None:
                version, _ = self.UpdateDefaultVersion()
        components = list(components)

        key_map = {}
        fetch_urls = {}

        if not target_tc or not toolchain_url:
            metadata = self._GetMetadata(version)
            target_tc = target_tc or metadata['toolchain-tuple'][0]
            toolchain_url = toolchain_url or metadata['toolchain-url']

        # Fetch toolchains from separate location.
        if self.TARGET_TOOLCHAIN_KEY in components:
            fetch_urls[self.TARGET_TOOLCHAIN_KEY] = os.path.join(
                self.toolchain_path, toolchain_url % {'target': target_tc})
            components.remove(self.TARGET_TOOLCHAIN_KEY)

        version_base = self._GetVersionGSBase(version)
        fetch_urls.update(
            (t, os.path.join(version_base, t)) for t in components)
        try:
            for key, url in fetch_urls.iteritems():
                cache_key = self._GetCacheKeyForComponent(version, key)
                ref = self.tarball_cache.Lookup(cache_key)
                key_map[key] = ref
                ref.Acquire()
                if not ref.Exists(lock=True):
                    # TODO(rcui): Parallelize this.  Requires acquiring locks *before*
                    # generating worker processes; therefore the functionality needs to
                    # be moved into the DiskCache class itself -
                    # i.e., DiskCache.ParallelSetDefault().
                    self._UpdateTarball(url, ref)

            ctx_version = version
            if self.sdk_path is not None:
                ctx_version = CUSTOM_VERSION
            yield self.SDKContext(ctx_version, target_tc, key_map)
        finally:
            # TODO(rcui): Move to using cros_build_lib.ContextManagerStack()
            cros_build_lib.SafeRun(
                [ref.Release for ref in key_map.itervalues()])
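As the docstring notes, toolchain_url is a format pattern that receives a 'target' substitution before being joined onto the toolchain path. A small worked example of that step follows; the bucket path is a made-up placeholder, while the pattern and toolchain tuple are the ones from the docstring.

import os

# Placeholder path; the real location comes from the SDK configuration.
toolchain_path = 'gs://example-bucket/toolchains'
toolchain_url = '2014/04/%(target)s-2014.04.23.220740.tar.xz'
target_tc = 'x86_64-cros-linux-gnu'

# Dict-based %-formatting fills in the %(target)s placeholder.
url = os.path.join(toolchain_path, toolchain_url % {'target': target_tc})
print(url)
# -> gs://example-bucket/toolchains/2014/04/x86_64-cros-linux-gnu-2014.04.23.220740.tar.xz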
Example #5
    def _stop(self):
        cros_build_lib.SafeRun([p.stop for p in self.patchers.itervalues()])
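Every example here hands SafeRun a list of cleanup callables so that one failing teardown step does not stop the remaining ones from running. The sketch below only illustrates that assumed behaviour; it is not the cros_build_lib implementation, and safe_run_sketch is a made-up name.

def safe_run_sketch(functors):
    """Run each callable in order; re-raise the first failure at the end.

    Illustrative sketch of the assumed behaviour, not cros_build_lib.SafeRun.
    """
    first_error = None
    for functor in functors:
        try:
            functor()
        except Exception as e:
            if first_error is None:
                first_error = e
    if first_error is not None:
        # Every callable has had a chance to run before we surface the error.
        raise first_error

def release_a():
    print('release_a ran')

def release_b():
    raise RuntimeError('release_b failed')

def release_c():
    print('release_c ran (still runs after the failure)')

try:
    safe_run_sketch([release_a, release_b, release_c])
except RuntimeError as error:
    print('re-raised: %s' % error)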