Example #1
    def _get_cached_root(self, variant):
        path = self._get_hash_path(variant)
        if not os.path.exists(path):
            return (self.VARIANT_NOT_FOUND, '')

        handle_dict = variant.handle.to_dict()

        for name in os.listdir(path):
            if name.endswith(".json"):
                incname = os.path.splitext(name)[0]
                json_filepath = os.path.join(path, name)
                rootpath = os.path.join(path, incname)
                copying_filepath = os.path.join(path, ".copying-" + incname)

                try:
                    with open(json_filepath) as f:
                        data = json.loads(f.read())
                except IOError as e:
                    if e.errno == errno.ENOENT:
                        # maybe got cleaned up by other process
                        continue
                    else:
                        raise

                if data.get("handle") == handle_dict:
                    if os.path.exists(copying_filepath):
                        try:
                            st = os.stat(copying_filepath)
                            secs = time.time() - st.st_mtime
                            if secs > self._COPYING_TIME_MAX:
                                return (self.VARIANT_COPY_STALLED, rootpath)
                        except OSError:
                            # maybe .copying file was deleted just now
                            pass

                        return (self.VARIANT_COPYING, rootpath)
                    else:
                        return (self.VARIANT_FOUND, rootpath)

        return (self.VARIANT_NOT_FOUND, '')
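
The (status, rootpath) pair returned above is what the rest of the cache machinery branches on. A minimal sketch of a caller, assuming an existing PackageCache instance named cache and a variant object (both names are assumptions for illustration):

    status, rootpath = cache._get_cached_root(variant)

    if status == cache.VARIANT_FOUND:
        print("variant already cached at %s" % rootpath)
    elif status in (cache.VARIANT_COPYING, cache.VARIANT_COPY_STALLED):
        print("another process is (or was) copying it to %s" % rootpath)
    else:  # VARIANT_NOT_FOUND
        print("variant is not in the cache")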
Example #2
File: config.py, Project: nerdvegas/rez
    def _validate(self, data):
        # overridden settings take precedence. Note that `data` has already
        # taken override into account at this point
        if self.key in self.config.overrides:
            return data

        if not self.config.locked:

            # next, env-var
            value = os.getenv(self._env_var_name)
            if value is not None:
                return self._parse_env_var(value)

            # next, JSON-encoded env-var
            varname = self._env_var_name + "_JSON"
            value = os.getenv(varname)
            if value is not None:
                from rez.utils import json

                try:
                    return json.loads(value)
                except ValueError:
                    raise ConfigurationError(
                        "Expected $%s to be JSON-encoded string." % varname
                    )

        # next, data unchanged
        if data is not None:
            return data

        # some settings have a programmatic default
        attr = "_get_%s" % self.key
        if hasattr(self.config, attr):
            return getattr(self.config, attr)()

        # setting is None
        return None
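
In practice this lookup order means any setting can be overridden from the environment without editing a config file. A minimal sketch, assuming the env-var names follow the REZ_<SETTING_NAME_UPPERCASED> convention implied by _env_var_name (the exact names are assumptions here, not taken from the excerpt):

    import os

    # plain string form, consumed by the os.getenv(self._env_var_name) branch
    # and parsed via _parse_env_var()
    os.environ["REZ_PACKAGE_CACHE_CLEAN_LIMIT"] = "100"

    # JSON-encoded form, consumed by the "<name>_JSON" branch; invalid JSON
    # here raises ConfigurationError when the setting is validated
    os.environ["REZ_PACKAGES_PATH_JSON"] = '["/rez/packages", "/rez/local_packages"]'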
Example #3
    def _run_daemon_step(self, state):
        logger = state["logger"]

        # pick a random pending variant to copy
        pending_filenames = set(os.listdir(self._pending_dir))
        pending_filenames -= set(state.get("copying", set()))
        if not pending_filenames:
            return False

        filename = random.choice(list(pending_filenames))
        filepath = os.path.join(self._pending_dir, filename)

        try:
            with open(filepath) as f:
                variant_handle_dict = json.loads(f.read())
        except IOError as e:
            if e.errno == errno.ENOENT:
                return True  # was probably deleted by another rez-pkg-cache proc
            raise

        variant = get_variant(variant_handle_dict)

        # copy the variant and log activity
        logger.info("Started caching of variant %s...", variant.uri)
        t = time.time()

        try:
            rootpath, status = self.add_variant(variant)

        except PackageCacheError as e:
            # variant cannot be cached, so remove as a pending variant
            logger.warning(str(e))
            safe_remove(filepath)
            return True

        except Exception:
            # This is probably an error during shutil.copytree (eg a perms fail).
            # In this case, the variant will be in VARIANT_COPYING status, and
            # will shortly transition to VARIANT_COPY_STALLED. Thus we can
            # remove the pending variant, as there's nothing more we can do.
            #
            logger.exception("Failed to add variant to the cache")
            safe_remove(filepath)
            return True

        secs = time.time() - t

        if status == self.VARIANT_FOUND:
            logger.info("Variant was already cached at %s", rootpath)
        elif status == self.VARIANT_COPYING:
            logger.info("Variant is already being copied to %s", rootpath)
        elif status == self.VARIANT_COPY_STALLED:
            logger.info("Variant is stalled copying to %s", rootpath)
        else:  # VARIANT_CREATED
            logger.info("Cached variant to %s in %g seconds", rootpath, secs)

        if status == self.VARIANT_COPYING:
            # we cannot delete the pending file (another proc is copying the
            # variant, so it's responsible); but we also have to ignore this
            # variant from now on.
            #
            state.setdefault("copying", set()).add(filename)
        else:
            safe_remove(filepath)

        return True
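
A True return means one pending entry was handled (or intentionally skipped) and the daemon should keep stepping; False means nothing is left to do. A minimal sketch of a driving loop, assuming a PackageCache instance named cache (this is not the project's actual daemon entry point):

    import logging

    state = {"logger": logging.getLogger("rez-pkg-cache")}

    # keep stepping until no unclaimed pending variants remain; the shared
    # state dict remembers filenames another process is already copying
    while cache._run_daemon_step(state):
        pass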
Example #4
    def get_variants(self):
        """Get variants and their current statuses from the cache.

        Returns:
            List of 3-tuple:
            - `Variant`: The cached variant
            - str: Local cache path for variant, if determined ('' otherwise)
            - int: Status. One of:
              - VARIANT_FOUND
              - VARIANT_COPYING
              - VARIANT_COPY_STALLED
              - VARIANT_PENDING
        """
        from rez.packages import get_variant

        statuses = (self.VARIANT_FOUND, self.VARIANT_COPYING,
                    self.VARIANT_COPY_STALLED)

        results = []
        seen_variants = set()

        # find variants in cache
        for pkg_name in safe_listdir(self.path):
            if pkg_name.startswith('.'):
                continue  # dirs for internal cache use

            path1 = os.path.join(self.path, pkg_name)

            for ver_str in safe_listdir(path1):
                path2 = os.path.join(path1, ver_str)

                for hash_str in safe_listdir(path2):
                    path3 = os.path.join(path2, hash_str)

                    for name in safe_listdir(path3):
                        if name.endswith(".json"):
                            with open(os.path.join(path3, name)) as f:
                                data = json.loads(f.read())

                            handle = data["handle"]
                            variant = get_variant(handle)

                            status, rootpath = self._get_cached_root(variant)
                            if status in statuses:
                                results.append((variant, rootpath, status))
                                seen_variants.add(variant)

        # find pending variants
        pending_filenames = os.listdir(self._pending_dir)

        for name in pending_filenames:
            filepath = os.path.join(self._pending_dir, name)

            try:
                with open(filepath) as f:
                    variant_handle_dict = json.loads(f.read())
            except (IOError, ValueError):
                continue  # maybe file was just deleted

            variant = get_variant(variant_handle_dict)
            if variant not in seen_variants:
                results.append((variant, '', self.VARIANT_PENDING))
                seen_variants.add(variant)

        return results
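
A minimal sketch of consuming the result, assuming a PackageCache instance named cache; the labels simply mirror the status constants listed in the docstring above:

    labels = {
        cache.VARIANT_FOUND: "cached",
        cache.VARIANT_COPYING: "copying",
        cache.VARIANT_COPY_STALLED: "stalled",
        cache.VARIANT_PENDING: "pending",
    }

    for variant, rootpath, status in cache.get_variants():
        print("%-8s %s  %s" % (labels[status], variant.uri, rootpath or "-"))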
Example #5
    def add_variants_async(self, variants):
        """Update the package cache by adding some or all of the given variants.

        This method is called when a context is created or sourced. Variants
        are then added to the cache in a separate process.
        """

        # A prod install is necessary because add_variants_async works by
        # starting a rez-pkg-cache proc, and this can only be done reliably in
        # a prod install. On non-windows we could fork instead, but there would
        # remain no good solution on windows.
        #
        if not system.is_production_rez_install:
            raise PackageCacheError(
                "PackageCache.add_variants_async is only supported in a "
                "production rez installation.")

        variants_ = []

        # trim down to those variants that are cachable, and not already cached
        for variant in variants:
            if not variant.parent.is_cachable:
                continue

            status, _ = self._get_cached_root(variant)
            if status == self.VARIANT_NOT_FOUND:
                variants_.append(variant)

        # if there are no variants to add, and no potential cleanup to do, then exit
        if not variants_ and config.package_cache_clean_limit < 0:
            return

        # Write each variant out to a file in the 'pending' dir in the cache. A
        # separate proc reads these files and then performs the actual variant
        # copy. Note that these files are unique, in case two rez procs attempt
        # to write out the same pending variant file at the same time.
        #
        pending_filenames = os.listdir(self._pending_dir)

        for variant in variants_:
            prefix = variant.parent.qualified_name + '-'
            handle_dict = variant.handle.to_dict()
            already_pending = False

            # check if this variant is already pending
            for filename in pending_filenames:
                if filename.startswith(prefix):
                    filepath = os.path.join(self._pending_dir, filename)
                    try:
                        with open(filepath) as f:
                            data = json.loads(f.read())
                    except (IOError, ValueError):
                        continue  # maybe file was just deleted

                    if data == handle_dict:
                        already_pending = True
                        break

            if already_pending:
                continue

            filename = prefix + uuid4().hex + ".json"
            filepath = os.path.join(self._pending_dir, filename)
            with open(filepath, 'w') as f:
                f.write(json.dumps(handle_dict))

        # configure executable
        if platform.system() == "Windows":
            kwargs = {"creationflags": subprocess.CREATE_NEW_PROCESS_GROUP}
        else:
            kwargs = {"preexec_fn": os.setsid}

        exe = os.path.join(system.rez_bin_path, "rez-pkg-cache")
        if not exe:
            # this should not happen
            raise RuntimeError("Did not find rez-pkg-cache executable")

        # start caching subproc
        args = [exe, "--daemon", self.path]

        try:
            with open(os.devnull, 'w') as devnull:

                # don't suppress output if selftest running, easier to debug
                if system.selftest_is_running:
                    out_target = None
                else:
                    out_target = devnull

                _ = subprocess.Popen(args,
                                     stdout=out_target,
                                     stderr=out_target,
                                     **kwargs)
        except Exception as e:
            print_warning(
                "Failed to start package caching daemon (command: %s): %s",
                ' '.join(args), e)
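
A minimal usage sketch, assuming the cache is constructed directly from its root directory and fed the variants of a resolved context (the constructor argument, the request list, and the resolved_packages attribute are assumptions for illustration, not taken from the excerpt):

    from rez.package_cache import PackageCache
    from rez.resolved_context import ResolvedContext

    context = ResolvedContext(["python-3", "mypkg"])  # assumed package request
    cache = PackageCache("/path/to/package/cache")    # assumed cache root

    # queues any cachable, not-yet-cached variants and spawns the
    # rez-pkg-cache daemon process shown above to copy them in the background
    cache.add_variants_async(context.resolved_packages)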