def lock(self):
    """Acquire an exclusive lock on the package directory.

    No-op when the lock is already held by this instance. Both the
    parent directory and the package directory itself are created on
    demand before/after the lock file object is constructed.
    """
    if self._lockfile:
        return  # already locked by this instance
    parent_dir = os.path.dirname(self.package_dir)
    self.ensure_dir_exists(parent_dir)
    lockfile = LockFile(self.package_dir)
    self._lockfile = lockfile
    self.ensure_dir_exists(self.package_dir)
    lockfile.acquire()
def _lock_state_file(self):
    """Take a lock file next to the state file when locking is enabled.

    Raises HomeDirPermissionsError when the lock file cannot be acquired
    (typically a permissions problem in the home directory).
    """
    if not self.lock:
        return  # locking disabled for this instance
    lockfile = LockFile(self.path)
    self._lockfile = lockfile
    try:
        lockfile.acquire()
    except IOError:
        raise exception.HomeDirPermissionsError(dirname(self.path))
def _lock_dbindex(self):
    """Try to acquire the cache db-index lock.

    Returns:
        bool: True when the lock was acquired, False when it could not
        be (callers then skip the cache operation gracefully).
    """
    self._lockfile = LockFile(self.cache_dir)
    try:
        self._lockfile.acquire()
    except Exception:  # pylint: disable=broad-except
        # FIX: narrowed from a bare `except:` — a bare except also swallows
        # KeyboardInterrupt/SystemExit; acquisition failures still yield False
        return False
    return True
def download(self, url, checksum=None, silent=False):
    """Download ``url`` into the download dir and return the local path.

    Reuses an existing download when present (refreshing its utime).
    Data is written to a temporary file first and renamed into place
    only after an optional checksum verification, all under a lock file
    so concurrent processes do not clobber each other.

    Raises:
        IOError: when the download fails even after a retry without the
            progress bar.
    """
    dl_path = self.compute_download_path(url, checksum or "")
    if os.path.isfile(dl_path):
        # already downloaded -> just refresh its access time
        self.set_download_utime(dl_path)
        return dl_path
    with_progress = not silent and not app.is_disabled_progressbar()
    tmp_fd, tmp_path = tempfile.mkstemp(dir=self.get_download_dir())
    try:
        with LockFile(dl_path):
            try:
                fd = FileDownloader(url)
                fd.set_destination(tmp_path)
                fd.start(with_progress=with_progress, silent=silent)
            except IOError as e:
                raise_error = not with_progress
                if with_progress:
                    # retry once without the progress bar; some terminals
                    # cannot render it and the transfer aborts
                    try:
                        fd = FileDownloader(url)
                        fd.set_destination(tmp_path)
                        fd.start(with_progress=False, silent=silent)
                    except IOError:
                        raise_error = True
                if raise_error:
                    self.print_message(
                        "Error: Please read http://bit.ly/package-manager-ioerror",
                        fg="red",
                        err=True,
                    )
                    raise e
            if checksum:
                fd.verify(checksum)
            os.close(tmp_fd)
            tmp_fd = None  # FIX: mark fd closed so `finally` cannot double-close it
            os.rename(tmp_path, dl_path)
    finally:
        # original code re-closed tmp_fd whenever tmp_path still existed,
        # double-closing the fd if os.rename() failed after the close above
        if tmp_fd is not None:
            os.close(tmp_fd)
        if os.path.isfile(tmp_path):
            os.remove(tmp_path)
    assert os.path.isfile(dl_path)
    self.set_download_utime(dl_path)
    return dl_path
class State(object):
    """Dict-like application state persisted as a JSON file.

    Intended for use as a context manager: the JSON file is loaded on
    ``__enter__`` and written back on ``__exit__`` only when the storage
    was modified. With ``lock=True`` a lock file guards concurrent access
    for the duration of the context.
    """

    def __init__(self, path=None, lock=False):
        self.path = path
        self.lock = lock
        if not self.path:
            # default storage location inside the core directory
            self.path = join(get_project_core_dir(), "appstate.json")
        self._storage = {}
        self._lockfile = None
        self.modified = False

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._storage = fs.load_json(self.path)
            assert isinstance(self._storage, dict)
        except (
            AssertionError,
            ValueError,
            UnicodeDecodeError,
            exception.InvalidJSONFile,
        ):
            # unreadable/corrupted state file -> start with empty storage
            self._storage = {}
        return self

    def __exit__(self, type_, value, traceback):
        try:
            if self.modified:
                try:
                    with open(self.path, "w") as fp:
                        fp.write(dump_json_to_unicode(self._storage))
                except IOError:
                    raise exception.HomeDirPermissionsError(get_project_core_dir())
        finally:
            # FIX: always release the lock, even when saving the state fails;
            # previously the raise above skipped the unlock
            self._unlock_state_file()

    def _lock_state_file(self):
        """Acquire the lock file when locking is enabled for this state."""
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)
        try:
            self._lockfile.acquire()
        except IOError:
            raise exception.HomeDirPermissionsError(dirname(self.path))

    def _unlock_state_file(self):
        # hasattr guard: __del__ may run before __init__ completed
        if hasattr(self, "_lockfile") and self._lockfile:
            self._lockfile.release()

    def __del__(self):
        self._unlock_state_file()

    # Dictionary Proxy

    def as_dict(self):
        return self._storage

    def keys(self):
        return self._storage.keys()

    def get(self, key, default=True):
        # NOTE(review): default is True, not None — presumably so absent
        # settings read as enabled; confirm with callers before changing
        return self._storage.get(key, default)

    def update(self, *args, **kwargs):
        self.modified = True
        return self._storage.update(*args, **kwargs)

    def clear(self):
        return self._storage.clear()

    def __getitem__(self, key):
        return self._storage[key]

    def __setitem__(self, key, value):
        self.modified = True
        self._storage[key] = value

    def __delitem__(self, key):
        self.modified = True
        del self._storage[key]

    def __contains__(self, item):
        return item in self._storage
def lock(self):
    """Take an exclusive lock on the package directory (no-op if held)."""
    if self._lockfile:
        return  # lock already held
    lockfile = LockFile(self.package_dir)
    self._lockfile = lockfile
    lockfile.acquire()
class BasePackageManager(  # pylint: disable=too-many-public-methods
    PackageManagerDownloadMixin,
    PackageManageRegistryMixin,
    PackageManagerInstallMixin,
    PackageManagerUninstallMixin,
    PackageManagerUpdateMixin,
    PackageManagerLegacyMixin,
):
    """Core package manager: discovery, manifests, metadata and caching.

    Download/registry/install/uninstall/update behavior comes from the
    mixins; this class owns the package directory, the lock file and a
    per-instance in-memory cache.
    """

    # class-level default; __init__ shadows it with a per-instance dict
    _MEMORY_CACHE = {}

    def __init__(self, pkg_type, package_dir):
        self.pkg_type = pkg_type
        self.package_dir = self.ensure_dir_exists(package_dir)
        self._MEMORY_CACHE = {}
        self._lockfile = None
        self._download_dir = None
        self._tmp_dir = None
        self._registry_client = None

    def lock(self):
        """Acquire an exclusive lock on the package directory (idempotent)."""
        if self._lockfile:
            return
        self._lockfile = LockFile(self.package_dir)
        self._lockfile.acquire()

    def unlock(self):
        """Release the package-directory lock if it is held."""
        # hasattr guard: may run from __del__ before __init__ completed
        if hasattr(self, "_lockfile") and self._lockfile:
            self._lockfile.release()
            self._lockfile = None

    def __del__(self):
        self.unlock()

    def memcache_get(self, key, default=None):
        """Return a cached value or ``default`` when ``key`` is absent."""
        return self._MEMORY_CACHE.get(key, default)

    def memcache_set(self, key, value):
        """Store ``value`` in the in-memory cache under ``key``."""
        self._MEMORY_CACHE[key] = value

    def memcache_reset(self):
        """Drop all in-memory cached values."""
        self._MEMORY_CACHE.clear()

    @staticmethod
    def is_system_compatible(value):
        """Return True when system-type list ``value`` matches this host.

        Empty/None values and wildcard "*" are treated as compatible.
        """
        if not value or "*" in value:
            return True
        return util.items_in_list(value, util.get_systype())

    @staticmethod
    def ensure_dir_exists(path):
        """Create ``path`` (and parents) if missing; return it."""
        if not os.path.isdir(path):
            try:
                os.makedirs(path)
            except:  # pylint: disable=bare-except
                # presumably ignores a race where another process created
                # the dir first; the assert below catches a genuine failure
                pass
        assert os.path.isdir(path)
        return path

    @staticmethod
    def ensure_spec(spec):
        """Coerce ``spec`` to a PackageSpec instance."""
        return spec if isinstance(spec, PackageSpec) else PackageSpec(spec)

    @property
    def manifest_names(self):
        """Manifest file names supported by the concrete manager subclass."""
        raise NotImplementedError

    def print_message(self, message, **kwargs):
        """Echo ``message`` prefixed with this manager's type name."""
        click.echo(
            "%s: " % str(self.__class__.__name__).replace("Package", " "), nl=False
        )
        click.secho(message, **kwargs)

    def get_download_dir(self):
        """Return (and lazily create) the shared downloads directory."""
        if not self._download_dir:
            self._download_dir = self.ensure_dir_exists(
                os.path.join(get_project_cache_dir(), "downloads")
            )
        return self._download_dir

    def get_tmp_dir(self):
        """Return (and lazily create) the shared temporary directory."""
        if not self._tmp_dir:
            self._tmp_dir = self.ensure_dir_exists(
                os.path.join(get_project_cache_dir(), "tmp")
            )
        return self._tmp_dir

    def find_pkg_root(self, path, spec):  # pylint: disable=unused-argument
        """Return the first directory under ``path`` with a manifest.

        Raises MissingPackageManifestError when none is found.
        """
        if self.manifest_exists(path):
            return path
        for root, _, _ in os.walk(path):
            if self.manifest_exists(root):
                return root
        raise MissingPackageManifestError(", ".join(self.manifest_names))

    def get_manifest_path(self, pkg_dir):
        """Return the path of the first existing manifest file, else None."""
        if not os.path.isdir(pkg_dir):
            return None
        for name in self.manifest_names:
            manifest_path = os.path.join(pkg_dir, name)
            if os.path.isfile(manifest_path):
                return manifest_path
        return None

    def manifest_exists(self, pkg_dir):
        """Truthy when ``pkg_dir`` contains a known manifest file."""
        return self.get_manifest_path(pkg_dir)

    def load_manifest(self, src):
        """Parse and memoize the manifest for a package dir/file or item.

        Raises MissingPackageManifestError when no candidate parses.
        """
        path = src.path if isinstance(src, PackageItem) else src
        cache_key = "load_manifest-%s" % path
        result = self.memcache_get(cache_key)
        if result:
            return result
        candidates = (
            [os.path.join(path, name) for name in self.manifest_names]
            if os.path.isdir(path)
            else [path]
        )
        for item in candidates:
            if not os.path.isfile(item):
                continue
            try:
                result = ManifestParserFactory.new_from_file(item).as_dict()
                self.memcache_set(cache_key, result)
                return result
            except ManifestException as e:
                if not PlatformioCLI.in_silence():
                    self.print_message(str(e), fg="yellow")
        raise MissingPackageManifestError(", ".join(self.manifest_names))

    @staticmethod
    def generate_rand_version():
        """Return a timestamp-based fallback version string."""
        return datetime.now().strftime("0.0.0+%Y%m%d%H%M%S")

    def build_metadata(self, pkg_dir, spec, vcs_revision=None):
        """Build PackageMetaData for ``pkg_dir`` from its manifest and spec."""
        manifest = self.load_manifest(pkg_dir)
        metadata = PackageMetaData(
            type=self.pkg_type,
            name=manifest.get("name"),
            version=manifest.get("version"),
            spec=spec,
        )
        if not metadata.name or spec.has_custom_name():
            metadata.name = spec.name
        if vcs_revision:
            # embed the VCS revision into the version's build metadata
            metadata.version = "%s+sha.%s" % (
                metadata.version if metadata.version else "0.0.0",
                vcs_revision,
            )
        if not metadata.version:
            metadata.version = self.generate_rand_version()
        return metadata

    def get_installed(self):
        """Return a memoized list of installed PackageItem objects."""
        cache_key = "get_installed"
        if self.memcache_get(cache_key):
            return self.memcache_get(cache_key)
        result = []
        for name in sorted(os.listdir(self.package_dir)):
            if name.startswith("_tmp_installing"):  # legacy tmp folder
                continue
            pkg_dir = os.path.join(self.package_dir, name)
            if not os.path.isdir(pkg_dir):
                continue
            pkg = PackageItem(pkg_dir)
            if not pkg.metadata:
                # legacy layout without metadata: rebuild it from the manifest
                try:
                    spec = self.build_legacy_spec(pkg_dir)
                    pkg.metadata = self.build_metadata(pkg_dir, spec)
                except MissingPackageManifestError:
                    pass
            if not pkg.metadata:
                continue
            if self.pkg_type == PackageType.TOOL:
                # skip tool packages declared for other host systems
                try:
                    if not self.is_system_compatible(
                        self.load_manifest(pkg).get("system")
                    ):
                        continue
                except MissingPackageManifestError:
                    pass
            result.append(pkg)
        self.memcache_set(cache_key, result)
        return result

    def get_package(self, spec):
        """Return the best installed package matching ``spec``, else None."""
        if isinstance(spec, PackageItem):
            return spec
        spec = self.ensure_spec(spec)
        best = None
        for pkg in self.get_installed():
            if not self.test_pkg_spec(pkg, spec):
                continue
            assert isinstance(pkg.metadata.version, semantic_version.Version)
            if spec.requirements and pkg.metadata.version not in spec.requirements:
                continue
            # prefer the highest matching version
            if not best or (pkg.metadata.version > best.metadata.version):
                best = pkg
        return best

    @staticmethod
    def test_pkg_spec(pkg, spec):
        """Return True when installed ``pkg`` satisfies ``spec``."""
        # "id" mismatch
        if spec.id and spec.id != pkg.metadata.spec.id:
            return False
        # external "URL" mismatch
        if spec.external:
            # local folder mismatch
            if os.path.realpath(spec.url) == os.path.realpath(pkg.path) or (
                spec.url.startswith("file://")
                and os.path.realpath(pkg.path) == os.path.realpath(spec.url[7:])
            ):
                return True
            if spec.url != pkg.metadata.spec.url:
                return False
        # "owner" mismatch
        elif spec.owner and not ci_strings_are_equal(
            spec.owner, pkg.metadata.spec.owner
        ):
            return False
        # "name" mismatch
        elif not spec.id and not ci_strings_are_equal(spec.name, pkg.metadata.name):
            return False
        return True
class ContentCache(object):
    """File-based content cache with expiry, indexed by a ``db.data`` file.

    Each cached value lives in its own file under ``cache_dir``; the
    index file records ``<expire-epoch>=<filename>`` lines. Expired
    entries are purged on ``__enter__`` via :meth:`delete`.
    """

    def __init__(self, namespace=None):
        self.cache_dir = os.path.join(get_project_cache_dir(), namespace or "content")
        self._db_path = os.path.join(self.cache_dir, "db.data")
        self._lockfile = None
        if not os.path.isdir(self.cache_dir):
            os.makedirs(self.cache_dir)

    def __enter__(self):
        # cleanup obsolete items
        self.delete()
        return self

    def __exit__(self, type_, value, traceback):
        pass

    @staticmethod
    def key_from_args(*args):
        """Derive a stable cache key: SHA1 over the truthy arguments."""
        h = hashlib.sha1()
        for arg in args:
            if arg:
                h.update(hashlib_encode_data(arg))
        return h.hexdigest()

    def get_cache_path(self, key):
        """Map ``key`` to its file path inside the cache directory."""
        # keys must not contain path separators (they become file names)
        assert "/" not in key and "\\" not in key
        key = str(key)
        assert len(key) > 3
        return os.path.join(self.cache_dir, key)

    def get(self, key):
        """Return the cached text for ``key``, or None when absent."""
        cache_path = self.get_cache_path(key)
        if not os.path.isfile(cache_path):
            return None
        with codecs.open(cache_path, "rb", encoding="utf8") as fp:
            return fp.read()

    def set(self, key, data, valid):
        """Store ``data`` under ``key`` with a validity like "1h" or "7d".

        ``valid`` is ``<number><unit>`` where the unit is one of
        s/m/h/d. Returns False when caching is disabled, ``data`` is
        empty, or the db-index lock cannot be taken.
        """
        if not app.get_setting("enable_cache"):
            return False
        cache_path = self.get_cache_path(key)
        if os.path.isfile(cache_path):
            self.delete(key)
        if not data:
            return False
        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert valid.endswith(tuple(tdmap))
        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))
        if not self._lock_dbindex():
            return False
        if not os.path.isdir(os.path.dirname(cache_path)):
            os.makedirs(os.path.dirname(cache_path))
        try:
            with codecs.open(cache_path, "wb", encoding="utf8") as fp:
                fp.write(data)
            with open(self._db_path, "a") as fp:
                fp.write("%s=%s\n" % (str(expire_time), os.path.basename(cache_path)))
        except UnicodeError:
            # undecodable payload: drop the partial cache file, keep going
            if os.path.isfile(cache_path):
                try:
                    os.remove(cache_path)
                except OSError:
                    pass
        return self._unlock_dbindex()

    def delete(self, keys=None):
        """Delete the given keys; with ``keys=None`` delete expired items.

        Rewrites the db index to keep only still-valid entries and removes
        the corresponding cache files (and their emptied directories).
        """
        if not os.path.isfile(self._db_path):
            return None
        if not keys:
            keys = []
        if not isinstance(keys, list):
            keys = [keys]
        paths_for_delete = [self.get_cache_path(k) for k in keys]
        found = False
        newlines = []
        with open(self._db_path) as fp:
            for line in fp.readlines():
                line = line.strip()
                if "=" not in line:
                    continue
                expire, fname = line.split("=")
                path = os.path.join(self.cache_dir, fname)
                try:
                    # keep entries that are unexpired, present, and not requested
                    if (
                        time() < int(expire)
                        and os.path.isfile(path)
                        and path not in paths_for_delete
                    ):
                        newlines.append(line)
                        continue
                except ValueError:
                    # malformed expire field -> treat the entry as stale
                    pass
                found = True
                if os.path.isfile(path):
                    try:
                        os.remove(path)
                        if not os.listdir(os.path.dirname(path)):
                            fs.rmtree(os.path.dirname(path))
                    except OSError:
                        pass
        if found and self._lock_dbindex():
            # rewrite the index with only the surviving entries
            with open(self._db_path, "w") as fp:
                fp.write("\n".join(newlines) + "\n")
            self._unlock_dbindex()
        return True

    def clean(self):
        """Remove the whole cache directory."""
        if not os.path.isdir(self.cache_dir):
            return
        fs.rmtree(self.cache_dir)

    def _lock_dbindex(self):
        """Acquire the db-index lock; return False on any failure."""
        self._lockfile = LockFile(self.cache_dir)
        try:
            self._lockfile.acquire()
        except:  # pylint: disable=bare-except
            return False
        return True

    def _unlock_dbindex(self):
        """Release the db-index lock if held; always returns True."""
        if self._lockfile:
            self._lockfile.release()
        return True