Example #1
    def _get_cache(self, deps_hash, targets):
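        # Try to fetch every target from the remote cache: each object is
        # stored under <deps_hash hex>/<sha1 of target path>, and the saved
        # st_mode metadata is restored on the downloaded file. Returns True
        # only on a full cache hit.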
        if deps_hash is None:
            return False
        cache_src = deps_hash.hex()
        try:
            for target in targets:
                f = hashlib.sha1(target.encode("ascii")).hexdigest()
                src = join(cache_src, f)
                obj = self.mc.fget_object(
                    bucket_name=global_config.remote_bucket,
                    object_name=src,
                    file_path=target)
                st_mode = int(obj.metadata["X-Amz-Meta-St_mode"])
                self._set_chmod(target, st_mode)
        except KeyError:
            # in some cases MinIO does not return the "X-Amz-Meta-St_mode"
            # metadata; the file will be pushed again after compilation
            out.print_fail("metadata does not exist")
            return False
        except error.NoSuchKey:
            return False
        except (urllib3.exceptions.ReadTimeoutError,
                urllib3.exceptions.MaxRetryError,
                urllib3.exceptions.ProtocolError):
            self._increase_timeout_and_check()
            return False
        except error.RequestTimeTooSkewed:
            out.print_fail(
                "Time on your host not configured currectlly, remote-cache is disabled"
            )
            global_config.remote_cache_enable = False
            return False

        return True
Example #2
 def _increase_timeout_and_check(self):
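     # Count remote-cache timeouts; after three of them the remote cache
     # is disabled for the rest of the build.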
     self.n_timeouts += 1
     if self.n_timeouts >= 3:
         out.print_fail(
             f"remote cache timedout {self.n_timeouts} time, disabling remote cahce"
         )
         global_config.remote_cache_enable = False
Example #3
 def _save_cache(self, deps_hash, targets):
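     # Upload each target to the remote bucket under
     # <deps_hash hex>/<sha1 of target path>, keeping the file mode in the
     # object metadata so _get_cache can restore it later.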
     cache_dst = deps_hash.hex()
     # fd, lock_path = fs_lock(cache_dst)
     # if fd == None:
     #     return
     try:
         # shutil.rmtree(cache_dst, ignore_errors=True)
         # os.mkdir(cache_dst)
         for target in targets:
             dst = join(cache_dst,
                        hashlib.sha1(target.encode("ascii")).hexdigest())
             file_attr = {"st_mode": self._get_chmod(target)}
             self.mc.fput_object(bucket_name=global_config.remote_bucket,
                                 object_name=dst,
                                 file_path=target,
                                 metadata=file_attr)
     except (urllib3.exceptions.ReadTimeoutError,
             urllib3.exceptions.MaxRetryError,
             urllib3.exceptions.ProtocolError):
         self._increase_timeout_and_check()
     except error.RequestTimeTooSkewed:
         out.print_fail(
             "Time on your host not configured currectlly, remote-cache is disabled"
         )
         global_config.remote_cache_enable = False
     finally:
         # fs_unlock(fd, lock_path)
         pass
Example #4
 def save_cache(self, cache_hash, metadata_cache: MetadataCache):
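     # Pickle the MetadataCache and upload it as a single "md-<hash>" object.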
     cache_src = "md-" + cache_hash.hex()
     md = pickle.dumps(metadata_cache, protocol=pickle.HIGHEST_PROTOCOL)
     try:
         self.mc.put_object(bucket_name=global_config.remote_bucket,
                            object_name=cache_src,
                            data=io.BytesIO(md),
                            length=len(md))
     except (urllib3.exceptions.ReadTimeoutError,
             urllib3.exceptions.MaxRetryError,
             urllib3.exceptions.ProtocolError):
         self._increase_timeout_and_check()
     except error.RequestTimeTooSkewed:
         out.print_fail(
             "Time on your host not configured currectlly, remote-cache is disabled"
         )
         global_config.remote_cache_enable = False
Example #5
    def gc(self):
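        # Garbage-collect the local cache: once the cache directory
        # approaches the configured size limit, delete the least recently
        # accessed entries until it drops below 60% of that limit.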
        def remove(path):
            """ param <path> could either be relative or absolute. """
            if os.path.isfile(path):
                os.remove(path)  # remove the file
            elif os.path.isdir(path):
                shutil.rmtree(path)  # remove dir and all contains
            else:
                raise ValueError("file {} is not a file or dir.".format(path))

        with Timer("done cache gc") as timer:
            cache_dir_size_KB = get_size_KB(UMAKE_BUILD_CACHE_DIR)
            high_thresh = cache_dir_size_KB * 1.1
            low_thresh = global_config.local_cache_size * 1024 * 0.6

            if global_config.local_cache_size * 1024 > high_thresh:
                return

            fd, lock_path = fs_lock(UMAKE_BUILD_CACHE_DIR)
            if fd is None:
                out.print_fail(f"\tcache: {UMAKE_BUILD_CACHE_DIR} is locked")
                return
            try:
                cache_entry_size = 0
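                # list the cache entries sorted by access time, least
                # recently used first, and delete them until the directory
                # size falls below the low threshold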
                cache_dir = check_output(
                    ['ls', '-lru', '--sort=time',
                     UMAKE_BUILD_CACHE_DIR]).decode('utf-8')
                for cache_line in cache_dir.splitlines():
                    try:
                        _, _, _, _, _, _, _, _, cache_entry_name = cache_line.split(
                        )
                        cache_entry_full_path = join(UMAKE_BUILD_CACHE_DIR,
                                                     cache_entry_name)
                        remove(cache_entry_full_path)
                        cache_entry_size = get_size_KB(UMAKE_BUILD_CACHE_DIR)
                        if cache_entry_size < low_thresh:
                            break
                    except ValueError:
                        pass
                timer.set_postfix(
                    f"freed {int((cache_dir_size_KB - cache_entry_size) / 1024)}MB"
                )
            finally:
                fs_unlock(fd, lock_path)
Example #6
 def open_cache(self, cache_hash) -> MetadataCache:
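     # Download and unpickle the MetadataCache stored under "md-<hash>";
     # any remote error is surfaced to the caller as FileNotFoundError.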
     cache_src = "md-" + cache_hash.hex()
     try:
         metadata_file = self.mc.get_object(
             bucket_name=global_config.remote_bucket, object_name=cache_src)
         metadata = pickle.loads(metadata_file.read())
         return metadata
     except (urllib3.exceptions.ReadTimeoutError,
             urllib3.exceptions.MaxRetryError,
             urllib3.exceptions.ProtocolError):
         self._increase_timeout_and_check()
         raise FileNotFoundError
     except error.RequestTimeTooSkewed:
         out.print_fail(
             "Time on your host not configured currectlly, remote-cache is disabled"
         )
         global_config.remote_cache_enable = False
         raise FileNotFoundError
     except error.NoSuchKey:
         raise FileNotFoundError