def _get_cache_dir() -> Path:
    if os.name == 'nt':
        path = Path(os.path.expandvars('%LocalAppData%/jcotton42/ficdl/cache'))
    else:
        path = xdg_cache_home().joinpath('jcotton42/ficdl')
    path.mkdir(mode=0o770, parents=True, exist_ok=True)
    return path
import sqlite3
from typing import Any, Optional

import jsonpickle
from xdg import xdg_cache_home


class Cached:
    """Cached resource."""

    cachedir = xdg_cache_home() / "gallica_autobib"  # TODO what happens if not on unix?
    CACHEFN = "cache.db"

    def __init__(self, cachename: str) -> None:
        """A resource in the cache, stored in a separate table."""
        self.tablename = cachename
        if not self.cachedir.exists():
            self.cachedir.mkdir(parents=True)
        cache = self.cachedir / self.CACHEFN
        self.con = sqlite3.connect(cache)
        MAKE_TABLE = f'CREATE TABLE IF NOT EXISTS "{cachename}" (key TEXT PRIMARY KEY, value BLOB)'
        self.con.execute(MAKE_TABLE)
        self.con.commit()

    def __del__(self) -> None:
        self.con.close()

    def __getitem__(self, key: str) -> Optional[Any]:
        GET_ITEM = f'SELECT value FROM "{self.tablename}" WHERE key = (?)'
        item = self.con.execute(GET_ITEM, (key,)).fetchone()
        if item:
            return jsonpickle.loads(item[0])
        else:
            return None

    def __setitem__(self, key: str, val: Any) -> None:
        SET = f'REPLACE INTO "{self.tablename}" (key, value) VALUES (?,?)'
        self.con.execute(SET, (key, jsonpickle.dumps(val)))
        self.con.commit()
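# A minimal usage sketch for the Cached class above; the table name "records",
# the key, and the payload are illustrative, not from the original project.
# Values round-trip through jsonpickle, so any picklable object can be stored.
if __name__ == "__main__":
    cache = Cached("records")
    cache["ark:/12148/example"] = {"title": "Demo", "pages": [1, 2, 3]}
    print(cache["ark:/12148/example"])  # -> {'title': 'Demo', 'pages': [1, 2, 3]}
    print(cache["missing-key"])         # -> None; absent keys return None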
class AppConfig(GoodConf):
    "Configuration for My App"
    # GitHub
    pat: str
    hooksecret: str
    # Runner
    config_home: pathlib.Path = xdg.xdg_config_home() / appname
    cache_home: pathlib.Path = xdg.xdg_cache_home() / appname
    dirs: typing.Any = None

    @validator('dirs', pre=True, always=True)
    def default_dirs(cls, v, *, values, **kwargs):
        return makepaths(values['config_home'], values['cache_home'])

    prefix: str
    remotes: typing.Dict[str, Remote]
    runnermap: typing.List[RunnerConf]
    web_host: IPvAnyAddress = ipaddress.IPv4Address('0.0.0.0')
    web_port: int = 5000
    web_tls: bool = True
    cleanup: bool = True
    # For testing
    activecfg: typing.FrozenSet[str] = frozenset()
    max_workers: int
    def_repo_args: dict = {}
    def_org_args: dict = {}

    class Config:
        default_files = def_configs
        file_env_var = "LXDRCFG"

    @root_validator
    def check_image_sources(cls, values):
        error = ""
        for rc in values.get('runnermap'):
            if ":" in rc.image:
                rem = rc.image.split(":")[0]
                if rem not in values.get("remotes"):
                    error += f"Remote '{rem}' is undefined\n"
        if error:
            raise ValueError(error)
        return values

    def key_pair_paths(self):
        return (self.config_home / "client.crt",
                self.config_home / "client.key")

    def app_paths(self):
        return [self.config_home, self.cache_home] + list(
            self.dirs.dict().values())

    def config_exists(self):
        return [cfgfile for cfgfile in def_configs if cfgfile.exists()]
def build_ext():
    logger.info("In build_wheel we are using a custom build API backboned by poetry")

    os.makedirs(DIR_RUNTIME, exist_ok=True)

    logger.info("Copy scripts")
    shutil.copy(
        DIR_EXT_SRC / "tsurgeon_script",
        DIR_RUNTIME / "tsurgeon_script"
    )
    shutil.copy(
        DIR_EXT_SCRIPTS / "simplify-tag.sed",
        DIR_RUNTIME / "simplify-tag.sed"
    )

    # Clean the decrypt folder
    DIR_RUNTIME_DECRYPT = pathlib.Path(DIR_RUNTIME / "decrypt")
    if DIR_RUNTIME_DECRYPT.exists():
        if DIR_RUNTIME_DECRYPT.is_dir():
            shutil.rmtree(DIR_RUNTIME_DECRYPT)
        else:
            raise FileExistsError
    # === END IF ===

    shutil.copytree(
        DIR_EXT_SCRIPTS / "decrypt",
        DIR_RUNTIME_DECRYPT,
    )
    shutil.copy(
        DIR_EXT_SCRIPTS / "supertagger_default.jsonnet",
        DIR_RUNTIME / "supertagger_default.jsonnet",
    )

    logger.info("Preprocess the tsurgeon scripts")
    os.makedirs(DIR_RUNTIME / "tsurgeon-debug", exist_ok=True)
    subprocess.run(
        fr"""
./lit \
    --input "{DIR_EXT_SCRIPTS}/*.tsgn.md" \
    --output "{DIR_RUNTIME}/tsurgeon-debug/" \
    --pattern "tsurgeon"; \
cat \
    {DIR_EXT_SCRIPTS}/pretreatments.tsgn.md \
    {DIR_EXT_SCRIPTS}/dependency.tsgn.md \
    {DIR_EXT_SCRIPTS}/dependency-post.tsgn.md \
    | ./lit --stdio --pattern "tsurgeon" \
    > {DIR_RUNTIME}/pre-relabel.tsgn;
""",
        shell=True
    ).check_returncode()

    logger.info("Build abs-hs (via stack)")
    res_stack = subprocess.Popen(
        (
            "stack",
            "--local-bin-path", "../../" / DIR_RUNTIME,
            "build", "--copy-bins", "--no-haddock",
        ),
        cwd=DIR_EXT_SRC / "abc-hs",
    )
    res_stack.wait()
    if res_stack.returncode:  # != 0
        raise subprocess.CalledProcessError(
            res_stack.returncode,
            res_stack.args
        )
    # === END IF ===
    logger.info("Successfully built abs-hs (via stack)")

    # === Get stanford-tregex.jar ===
    logger.info("Obtain Stanford Tregex")
    tregex_zip_cache_path = xdg.xdg_cache_home() / "ABCTreebank-build/stanford-tregex-4.2.0.zip"
    if os.path.isfile(tregex_zip_cache_path) and zipfile.is_zipfile(tregex_zip_cache_path):
        # TODO: SHA
        logger.info(
            f"A Stanford Tregex zip cache is found at {tregex_zip_cache_path}"
        )
    else:
        logger.info(
            "No Stanford Tregex zip cache is available. "
            "Trying to download one."
        )
        tregex_zip_url = "https://nlp.stanford.edu/software/stanford-tregex-4.2.0.zip"
        logger.info(f"Download Stanford Tregex from {tregex_zip_url}")
        tregex_zip_size = int(
            requests.head(tregex_zip_url).headers["content-length"]
        )
        tregex_zip_new = requests.get(tregex_zip_url, stream=True)

        os.makedirs(xdg.xdg_cache_home() / "ABCTreebank-build", exist_ok=True)
        unit_chunk: int = 1024
        with tqdm.tqdm(
            total=tregex_zip_size,
            unit="B",
            unit_scale=True,
            unit_divisor=unit_chunk
        ) as pb, open(tregex_zip_cache_path, "wb") as temp:
            pb.write("Downloading Stanford Tregex from the Internet ...")
            for chunk in tregex_zip_new.iter_content(chunk_size=1024):
                temp.write(chunk)
                pb.update(len(chunk))
            # === END FOR chunk ===
        # === END WITH pb, temp ===
    # === END IF ===

    # https://stackoverflow.com/a/17729939
    with zipfile.ZipFile(tregex_zip_cache_path) as zf:
        with zf.open("stanford-tregex-2020-11-17/stanford-tregex.jar") as jar_src, \
             open(DIR_RUNTIME / "stanford-tregex.jar", "wb") as tregex_jar:
            shutil.copyfileobj(jar_src, tregex_jar)
        # === END WITH jar_src, tregex_jar ===
    # === END WITH zf ===

    return 0  # Succeeded!
# Copyright 2020-2021 Robert Schroll
# This file is part of rmcl and is distributed under the MIT license.

import sqlite3

from xdg import xdg_cache_home

CACHE_FILE = xdg_cache_home() / 'rmcl' / 'filedata.db'


def _fix_old_cache_dir():
    # 2/14/2021 -- We had been using the 'rmfuse' cache directory since
    # early development. Let's use rmcl instead. But we'll move the
    # existing DB over, if it exists and the new one doesn't.
    old_cache_file = xdg_cache_home() / 'rmfuse' / 'filedata.db'
    if old_cache_file.exists() and not CACHE_FILE.exists():
        CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
        old_cache_file.rename(CACHE_FILE)

_fix_old_cache_dir()

_conn = None

def _get_conn():
    global _conn
    if _conn is not None:
        return _conn
    CACHE_FILE.parent.mkdir(parents=True, exist_ok=True)
    _conn = sqlite3.connect(CACHE_FILE)
    c = _conn.cursor()
    c.execute('''CREATE TABLE IF NOT EXISTS filedata
                 (id TEXT, version INTEGER, property TEXT, value BLOB,
                  UNIQUE(id, version, property))''')
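# A hedged sketch of reading and writing the filedata table created above.
# It assumes _get_conn() (truncated in the excerpt) finishes by returning the
# module-level _conn; the helper names _set_property/_get_property are
# illustrative, not part of rmcl's actual API.
def _set_property(id_, version, prop, value):
    conn = _get_conn()
    # INSERT OR REPLACE relies on the UNIQUE(id, version, property) constraint.
    conn.execute('INSERT OR REPLACE INTO filedata VALUES (?, ?, ?, ?)',
                 (id_, version, prop, value))
    conn.commit()

def _get_property(id_, version, prop):
    row = _get_conn().execute(
        'SELECT value FROM filedata WHERE id = ? AND version = ? AND property = ?',
        (id_, version, prop)).fetchone()
    return row[0] if row else None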
import re
from urllib import request, parse
import os.path
import json
import spotipy
from Xlib.display import Display
from spotipy.oauth2 import SpotifyClientCredentials
from xdg import xdg_cache_home, xdg_config_home
from pathlib import Path
import tempfile

# Playerctl is used below but was not imported in this excerpt; it comes from
# GObject introspection (the "2.0" typelib version here is an assumption).
import gi
gi.require_version("Playerctl", "2.0")
from gi.repository import Playerctl

player = Playerctl.Player()
playing = False
previousAlbumArt = None  # Used to not reblur if you're listening to an album

cachedir = xdg_cache_home().joinpath("background-media")
configdir = xdg_config_home().joinpath("background-media")
tempdir = Path(os.path.join(tempfile.gettempdir(), "background-media"))

# Set up dirs
cachedir.mkdir(parents=True, exist_ok=True)
configdir.mkdir(parents=True, exist_ok=True)
tempdir.mkdir(parents=True, exist_ok=True)


def getResolution():
    screen = Display(':0').screen()
    return "{}x{}".format(screen.width_in_pixels, screen.height_in_pixels)


def squareResolution():
    screen = Display(':0').screen()
    x = screen.width_in_pixels
    y = screen.height_in_pixels
def state_path(cls) -> Path:
    return xdg_cache_home().joinpath(f"cars.{cls.name}.state")
def get_user_cache_dir(user: str = None) -> Path:
    if _use_xdg(user):
        return xdg.xdg_cache_home()
    return get_user_home(user) / ".cache"
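# Hypothetical stand-ins for the two helpers the function above relies on,
# included only so the snippet can be exercised in isolation; the real
# project's _use_xdg/get_user_home may be defined differently.
import getpass
import pwd
from pathlib import Path

def _use_xdg(user):
    # Assume the XDG lookup only applies when targeting the current user.
    return user is None or user == getpass.getuser()

def get_user_home(user):
    return Path(pwd.getpwnam(user).pw_dir) if user else Path.home()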
import sys

PY_VER = sys.version_info

if PY_VER >= (3, 7):
    import importlib.resources as imp_res  # type: ignore
else:
    import importlib_resources as imp_res  # type: ignore
# === END IF ===

import xdg

with imp_res.path("abctk", "runtime") as runtime_path:
    DIR_RUNTIME = runtime_path

DIR_SHARE = xdg.xdg_data_home() / "ABCT-toolkit"
DIR_CACHE = xdg.xdg_cache_home() / "ABCT-toolkit"

_TOKEN_EMBEDDING_DIM = 200
_CHAR_EMBEDDING_DIM = 50
_CHAR_EMBEDDED_DIM = 100

_TOKEN_INDEXERS = {
    "tokens": {
        "type": "single_id",
        "lowercase_tokens": False,
    },
    "token_characters": {
        "type": "characters",
        "min_padding_length": 5,
        "character_tokenizer": {
            "end_tokens": [
def test_xdg_cache_home_set(monkeypatch: MonkeyPatch) -> None:
    """Test xdg_cache_home when XDG_CACHE_HOME is set."""
    monkeypatch.setenv("XDG_CACHE_HOME", "/xdg_cache_home")
    assert xdg.xdg_cache_home() == Path("/xdg_cache_home")
def test_xdg_cache_home_empty(monkeypatch: MonkeyPatch) -> None:
    """Test xdg_cache_home when XDG_CACHE_HOME is empty."""
    monkeypatch.setenv("HOME", os.fspath(HOME_DIR))
    monkeypatch.setenv("XDG_CACHE_HOME", "")
    assert xdg.xdg_cache_home() == HOME_DIR / ".cache"
def test_xdg_cache_home_unset(monkeypatch: MonkeyPatch) -> None:
    """Test xdg_cache_home when XDG_CACHE_HOME is unset."""
    monkeypatch.delenv("XDG_CACHE_HOME", raising=False)
    monkeypatch.setenv("HOME", os.fspath(HOME_DIR))
    assert xdg.xdg_cache_home() == HOME_DIR / ".cache"
import codecs
import json
import os
from pathlib import Path

import typer
from InquirerPy.separator import Separator
from InquirerPy.validator import NumberValidator
from loguru import logger
from owntwin.builder.package import Package
from owntwin.builder.terrain import extract_meshed_level
from owntwin.builtin_datasources import gsi
from xdg import xdg_cache_home

app = typer.Typer()

FILENAME = "twin.json"

if os.name == "nt":
    CACHE_DIR = Path(os.path.expandvars("%APPDATA%/owntwin/cache"))
else:
    CACHE_DIR = xdg_cache_home().joinpath("owntwin/")

if not CACHE_DIR.exists():
    CACHE_DIR.mkdir(parents=True)


def load_config():
    with open(FILENAME, "r") as f:
        twin = json.load(f)
    return twin


def save_config(config, path):
    # NOTE: Use codecs.open for win
    with codecs.open(path, "w", "utf-8") as f:
        json.dump(config, f, ensure_ascii=False, indent=2)
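# Illustrative round-trip with the helpers above: load twin.json (FILENAME is
# defined in the excerpt), tweak a field, and write it back. The "title" key
# is a made-up example, not a documented owntwin field.
if __name__ == "__main__":
    twin = load_config()
    twin["title"] = "Example twin"
    save_config(twin, FILENAME)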
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from pathlib import Path

import xdg

DIR = Path(xdg.xdg_cache_home()) / "pants_jupyter_plugin"
import os
import threading
import time
import configparser
import sys
from os.path import join

# The `xdg` package (srstevenson/xdg) exposes xdg_cache_home() as a function
# returning a Path, while pyxdg's xdg.BaseDirectory exposes it as a plain
# string attribute, hence the call in one branch and not the other.
if sys.executable.startswith("/home/"):
    from xdg import xdg_cache_home
    cache_dir = xdg_cache_home()
else:
    from xdg.BaseDirectory import xdg_cache_home
    cache_dir = xdg_cache_home

colors = {
    "normal": "\033[00m",
    "red_error": "\033[91m",
    "green": "\033[92m",
    "red": "\033[91m",
    "blue": "\033[94m",
    "yellow": "\033[93m",
}

message = ""
wall_names = {}
HOME = os.environ.get("HOME")
working_dir = join(cache_dir, "redpaper")
settings_file = join(working_dir, "settings.ini")
def _default_path() -> Path:
    p = xdg_cache_home() / "omnilingo"
    p.mkdir(parents=True, exist_ok=True)
    return p