Example 1
    def __init__(self, path):
        """Initialize a package cache rooted at an existing directory.

        Args:
            path (str): Path on disk, must exist.

        Raises:
            PackageCacheError: If `path` is not an existing directory.
        """
        if not os.path.isdir(path):
            raise PackageCacheError("Not a directory: %s" % path)

        self.path = path

        # create the subdirectories the cache uses for internal bookkeeping
        for internal_dir in (self._log_dir, self._pending_dir, self._remove_dir):
            safe_makedirs(internal_dir)
Example 2
    def add_variants_async(self, variants):
        """Update the package cache by adding some or all of the given variants.

        This method is called when a context is created or sourced. Variants
        are then added to the cache in a separate process (a detached
        'rez-pkg-cache --daemon' subprocess).

        Args:
            variants (list of `Variant`): Variants to consider caching.

        Raises:
            PackageCacheError: If this is not a production rez install.
        """

        # A prod install is necessary because add_variants_async works by
        # starting a rez-pkg-cache proc, and this can only be done reliably in
        # a prod install. On non-windows we could fork instead, but there would
        # remain no good solution on windows.
        #
        if not system.is_production_rez_install:
            raise PackageCacheError(
                "PackageCache.add_variants_async is only supported in a "
                "production rez installation.")

        variants_ = []

        # trim down to those variants that are cachable, and not already cached
        for variant in variants:
            if not variant.parent.is_cachable:
                continue

            status, _ = self._get_cached_root(variant)
            if status == self.VARIANT_NOT_FOUND:
                variants_.append(variant)

        # if there are no variants to add, and no potential cleanup to do, then exit
        if not variants_ and config.package_cache_clean_limit < 0:
            return

        # Write each variant out to a file in the 'pending' dir in the cache. A
        # separate proc reads these files and then performs the actual variant
        # copy. Note that these files are unique, in case two rez procs attempt
        # to write out the same pending variant file at the same time.
        #
        pending_filenames = os.listdir(self._pending_dir)

        for variant in variants_:
            prefix = variant.parent.qualified_name + '-'
            handle_dict = variant.handle.to_dict()
            already_pending = False

            # check if this variant is already pending
            for filename in pending_filenames:
                if filename.startswith(prefix):
                    filepath = os.path.join(self._pending_dir, filename)
                    try:
                        with open(filepath) as f:
                            data = json.loads(f.read())
                    except Exception:
                        # best-effort: the file may have just been deleted by
                        # the daemon, or contain partial/invalid json
                        continue

                    if data == handle_dict:
                        already_pending = True
                        break

            if already_pending:
                continue

            # uuid suffix keeps the filename unique across concurrent procs
            filename = prefix + uuid4().hex + ".json"
            filepath = os.path.join(self._pending_dir, filename)
            with open(filepath, 'w') as f:
                f.write(json.dumps(handle_dict))

        # configure subprocess so it runs detached from this process
        if platform.system() == "Windows":
            kwargs = {"creationflags": subprocess.CREATE_NEW_PROCESS_GROUP}
        else:
            kwargs = {"preexec_fn": os.setsid}

        exe = os.path.join(system.rez_bin_path, "rez-pkg-cache")
        if not exe:
            # this should not happen
            raise RuntimeError("Did not find rez-pkg-cache executable")

        # start caching subproc
        args = [exe, "--daemon", self.path]

        try:
            with open(os.devnull, 'w') as devnull:

                # don't suppress output if selftest running, easier to debug
                if system.selftest_is_running:
                    out_target = None
                else:
                    out_target = devnull

                # pass `args` (not a rebuilt literal) so the warning below
                # always reports exactly the command that was run
                _ = subprocess.Popen(args,
                                     stdout=out_target,
                                     stderr=out_target,
                                     **kwargs)
        except Exception as e:
            print_warning(
                "Failed to start package caching daemon (command: %s): %s",
                ' '.join(args), e)
Example 3
    def add_variant(self, variant, force=False):
        """Copy a variant's payload into the cache.

        The following steps are taken to ensure muti-thread/proc safety, and to
        guarantee that a partially-copied variant payload is never able to be
        used:

        1. The hash dir (eg '/<cache_dir>/foo/1.0.0/af8d') is created;
        2. A file lock mutex ('/<cache_dir>/.lock') is acquired;
        3. The file '/<cache_dir>/foo/1.0.0/af8d/.copying-a' (or -b, -c etc) is
           created. This tells rez that this variant is being copied and cannot
           be used yet;
        4. The file '/<cache_dir>/foo/1.0.0/af8d/a.json' is created. Now
           another proc/thread can't create the same local variant;
        5. The file lock is released;
        6. The variant payload is copied to '/<cache_dir>/foo/1.0.0/af8d/a';
        7. The '.copying-a' file is removed.

        Note that the variant will not be cached in the following circumstances,
        unless `force` is True:

        - The variant is not cachable as determined by `Variant.is_cachable`;
        - The variant is from a local package, and 'config.package_cache_local'
          is False;
        - The variant is stored on the same disk device as this cache, and
          config.package_cache_same_device' is False.

        Args:
            variant (`Variant`): The variant to copy into this cache
            force (bool): Copy the variant regardless. Use at your own risk (there
                is no guarantee the resulting variant payload will be functional).

        Returns:
            2-tuple:
            - str: Path to cached payload
            - int: One of:
              - VARIANT_FOUND
              - VARIANT_CREATED
              - VARIANT_COPYING
              - VARIANT_COPY_STALLED

        Raises:
            PackageCacheError: If the variant cannot be cached (no root, root
                missing on disk, or one of the `force`-bypassed checks fails).
        """
        from rez.utils.base26 import get_next_base26
        from rez.utils.filesystem import safe_makedirs

        # do some sanity checking on variant to cache
        package = variant.parent
        variant_root = getattr(variant, "root", None)

        if not variant_root:
            raise PackageCacheError(
                "Not cached - variant is a type that does not have a root: %s"
                % variant.uri)

        if not os.path.isdir(variant_root):
            raise PackageCacheError(
                "Not cached - variant %s root does not appear on disk: %s" %
                (variant.uri, variant_root))

        if not force:
            # package is configured to not be cachable
            if not package.is_cachable:
                raise PackageCacheError(
                    "Not cached - package is not cachable: %s" % package.uri)

            # package is local
            if not config.package_cache_local and variant.is_local:
                raise PackageCacheError("Not cached - package is local: %s" %
                                        package.uri)

            # Package is already on same disk device as package cache. Note that
            # this check is skipped on Windows + Py<3.4, as os.stat does not
            # support device identification.
            #
            dev_stat_not_supported = (platform.system() == "Windows"
                                      and sys.version_info[:2] < (3, 4))

            if not config.package_cache_same_device and not dev_stat_not_supported:
                st_pkgcache = os.stat(self.path)
                st_variant = os.stat(variant_root)
                if st_pkgcache.st_dev == st_variant.st_dev:
                    raise PackageCacheError(
                        "Not cached - variant %s is on same device as cache: %s"
                        % (variant.uri, variant_root))

            # Package belongs to a temp repo (this occurs when a package is
            # tested on pre_build/pre_release - see
            # https://github.com/nerdvegas/rez/wiki/Package-Definition-Guide#tests)
            #
            if package.repository.name() == "filesystem" and \
                    package.repository.location.startswith(config.tmpdir + os.sep):
                raise PackageCacheError(
                    "Not cached - package is in temp repository %s" %
                    package.repository)

        no_op_statuses = (self.VARIANT_FOUND, self.VARIANT_COPYING,
                          self.VARIANT_COPY_STALLED)

        # variant already exists, or is being copied to cache by another thread/proc
        status, rootpath = self._get_cached_root(variant)
        if status in no_op_statuses:
            return (rootpath, status)

        # 1.
        path = self._get_hash_path(variant)
        safe_makedirs(path)

        # construct data to store to json file
        data = {"handle": variant.handle.to_dict()}

        if variant.index is not None:
            # just added for debugging purposes
            data["data"] = package.data["variants"][variant.index]

        # 2. + 5.
        with self._lock():
            # Check if variant exists again, another proc could have created it
            # just before lock acquire
            #
            status, rootpath = self._get_cached_root(variant)
            if status in no_op_statuses:
                return (rootpath, status)

            # determine next increment name ('a', 'b' etc)
            names = os.listdir(path)
            names = [x for x in names if x.endswith(".json")]

            if names:
                prev = os.path.splitext(max(names))[0]
            else:
                prev = None

            incname = get_next_base26(prev)

            # 3.
            copying_filepath = os.path.join(path, ".copying-" + incname)
            with open(copying_filepath, 'w'):
                pass

            # 4.
            json_filepath = os.path.join(path, incname + ".json")
            with open(json_filepath, 'w') as f:
                f.write(json.dumps(data))

        # 6.
        #
        # Here we continually update mtime on the .copying file, to indicate
        # that the copy is active. This allows us to detect stalled/errored
        # copies, and report them as VARIANT_COPY_STALLED status.
        #
        still_copying = True

        def _while_copying():
            while still_copying:
                time.sleep(self._COPYING_TIME_INC)
                try:
                    os.utime(copying_filepath, None)
                except OSError:
                    # best-effort mtime refresh; the file may already have
                    # been removed by the main thread at the end of the copy
                    pass

        rootpath = os.path.join(path, incname)
        th = threading.Thread(target=_while_copying)
        th.daemon = True
        th.start()

        try:
            shutil.copytree(variant_root, rootpath)
        finally:
            # stop the mtime-refresh thread. If copytree raised, we skip step
            # 7 below, deliberately leaving the .copying file in place so the
            # partial copy is later reported as VARIANT_COPY_STALLED.
            still_copying = False

        # 7.
        th.join()
        os.remove(copying_filepath)

        return (rootpath, self.VARIANT_CREATED)