Example #1
    def _get_writable_cache_dir(self):
        """
        Get a writable cache directory, falling back to the user's cache
        directory and then to the global temp directory.

        :raises: CacheFileError when no candidate cache directory is writable
        :return: path to cache directory
        :rtype: str
        """
        dir_path_data = self._get_default_cache_dir()

        if os.access(dir_path_data, os.W_OK):
            self._default_cache_file = True
            return dir_path_data

        dir_path_user = user_cache_dir(self._URLEXTRACT_NAME)
        if not os.path.exists(dir_path_user):
            try:
                os.makedirs(dir_path_user, exist_ok=True)
            except PermissionError:
                # if a PermissionError is raised, continue and try
                # the last fallback dir (the global temp directory)
                pass

        if os.access(dir_path_user, os.W_OK):
            return dir_path_user

        dir_path_temp = tempfile.gettempdir()
        if os.access(dir_path_temp, os.W_OK):
            return dir_path_temp

        raise CacheFileError("Cache directories are not writable.")
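
The method above tries the packaged default directory, then the per-user cache directory, then the system temp directory. A minimal standalone sketch of the same fallback chain, assuming a hypothetical APP_NAME (urlextract supplies its own name and raises its own CacheFileError):

import os
import tempfile

from platformdirs import user_cache_dir

APP_NAME = "myapp"  # hypothetical application name


def writable_cache_dir() -> str:
    """Return the first writable candidate: user cache dir, then temp dir."""
    for path in (user_cache_dir(APP_NAME), tempfile.gettempdir()):
        try:
            # Create the directory if it is missing; fall through on failure.
            os.makedirs(path, exist_ok=True)
        except OSError:
            continue
        if os.access(path, os.W_OK):
            return path
    raise OSError("no writable cache directory found")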
Example #2
def test_pylint_home() -> None:
    uhome = os.path.expanduser("~")
    if uhome == "~":
        expected = OLD_DEFAULT_PYLINT_HOME
    else:
        expected = platformdirs.user_cache_dir("pylint")
    assert config.PYLINT_HOME == expected
    assert PYLINT_HOME == expected
Example #3
    def __init__(self, app_name: str):
        self.app_name: str = str(app_name)
        self.cache_dir = PathPlus(
            platformdirs.user_cache_dir(f"{self.app_name}_cache"))
        self.cache_dir.maybe_make(parents=True)

        # Mapping of function names to their caches
        self.caches: Dict[str, Dict[str, Any]] = {}
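
For orientation, platformdirs.user_cache_dir maps an application name to a platform-specific location. The paths below are illustrative only; the exact result depends on the platformdirs version and OS settings:

from platformdirs import user_cache_dir

# Typical results for user_cache_dir("myapp_cache"):
#   Linux:   ~/.cache/myapp_cache
#   macOS:   ~/Library/Caches/myapp_cache
#   Windows: C:\Users\<user>\AppData\Local\myapp_cache\myapp_cache\Cache
print(user_cache_dir("myapp_cache"))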
Example #4
def _get_default_cache_path():
    path = platformdirs.user_cache_dir("zeep", False)
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise
    return os.path.join(path, "cache.db")
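
On Python 3 the EEXIST handling above can be collapsed into os.makedirs(path, exist_ok=True). A sketch of the equivalent, keeping the "zeep" name from the example:

import os

import platformdirs


def _get_default_cache_path():
    # appauthor=False omits the author segment from the path on Windows.
    path = platformdirs.user_cache_dir("zeep", False)
    os.makedirs(path, exist_ok=True)  # succeeds silently if the dir exists
    return os.path.join(path, "cache.db")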
Example #5
def get_cache_dir() -> Path:
    """Get the cache directory used by black.

    Users can customize this directory on all systems using `BLACK_CACHE_DIR`
    environment variable. By default, the cache directory is the user cache directory
    under the black application.

    This result is immediately set to a constant `black.cache.CACHE_DIR` so as
    to avoid repeated calls.
    """
    # NOTE: Function mostly exists as a clean way to test getting the cache directory.
    default_cache_dir = user_cache_dir("tan", version=__version__)
    cache_dir = Path(os.environ.get("BLACK_CACHE_DIR", default_cache_dir))
    return cache_dir
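
Several examples in this collection layer an environment-variable override on top of the platformdirs default (BLACK_CACHE_DIR here, PIPENV_CACHE_DIR and REQUIREMENTSLIB_CACHE_DIR below). A minimal sketch of that precedence, with a hypothetical MYAPP_CACHE_DIR variable:

import os
from pathlib import Path

from platformdirs import user_cache_dir


def get_cache_dir(app_name: str = "myapp") -> Path:
    # The environment variable wins; otherwise use the per-user cache dir.
    default = user_cache_dir(app_name)
    return Path(os.environ.get("MYAPP_CACHE_DIR", default))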
Example #6
	def __init__(self, app_name: str, expires_after: datetime.timedelta = datetime.timedelta(days=28)):
		self.app_name: str = str(app_name)
		self.cache_dir = PathPlus(platformdirs.user_cache_dir(self.app_name))
		self.cache_dir.maybe_make(parents=True)

		self.session: requests.Session = CacheControl(
				sess=requests.Session(),
				cache=FileCache(self.cache_dir),
				heuristic=ExpiresAfter(
						days=expires_after.days,
						seconds=expires_after.seconds,
						microseconds=expires_after.microseconds,
						),
				adapter_class=RateLimitAdapter
				)
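
datetime.timedelta normalizes any combination of units into exactly days, seconds, and microseconds, which is why the example can pass those three attributes straight through to ExpiresAfter. A quick illustration:

import datetime

td = datetime.timedelta(weeks=4)  # the 28-day default above
print(td.days, td.seconds, td.microseconds)  # 28 0 0

td = datetime.timedelta(hours=36)
print(td.days, td.seconds, td.microseconds)  # 1 43200 0  (1 day + 12 h)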
Example #7
def test_pylint_home() -> None:
    uhome = os.path.expanduser("~")
    if uhome == "~":
        expected = ".pylint.d"
    else:
        expected = platformdirs.user_cache_dir("pylint")
    assert config.PYLINT_HOME == expected

    try:
        pylintd = join(tempfile.gettempdir(), ".pylint.d")
        os.environ["PYLINTHOME"] = pylintd
        try:
            reload(config)
            assert config.PYLINT_HOME == pylintd
        finally:
            try:
                rmtree(pylintd)
            except FileNotFoundError:
                pass
    finally:
        del os.environ["PYLINTHOME"]
Example #8
    def __init__(self, identifier, key_builder=None, container_dir=None):
        self.identifier = identifier

        if key_builder is None:
            key_builder = KeyBuilder()

        self.key_builder = key_builder

        from os.path import join
        if container_dir is None:
            try:
                import platformdirs as appdirs
            except ImportError:
                import appdirs

            container_dir = join(
                appdirs.user_cache_dir("pytools", "pytools"),
                "pdict-v4-{}-py{}".format(
                    identifier, ".".join(str(i) for i in sys.version_info)))

        self.container_dir = container_dir

        self._make_container_dir()
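
Embedding the interpreter version in the directory name, as above, keeps caches written by different Python versions from colliding. A sketch of just that naming step ("myid" is a hypothetical identifier):

import sys
from os.path import join

from platformdirs import user_cache_dir

version_tag = ".".join(str(i) for i in sys.version_info)
container_dir = join(user_cache_dir("pytools", "pytools"),
                     f"pdict-v4-myid-py{version_tag}")
print(container_dir)  # e.g. .../pytools/pdict-v4-myid-py3.11.4.final.0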
Example #9
import atexit
import copy
import hashlib
import json
import os
import pathlib
import sys

import vistir
from packaging.requirements import Requirement
from pip_shims.shims import FAVORITE_HASH, SafeFileCache
from platformdirs import user_cache_dir

from .utils import as_tuple, get_pinned_version, key_from_req, lookup_table

CACHE_DIR = os.environ.get("PIPENV_CACHE_DIR", user_cache_dir("pipenv"))


# Pip-tools cache implementation
class CorruptCacheError(Exception):
    def __init__(self, path):
        self.path = path

    def __str__(self):
        lines = [
            "The dependency cache seems to have been corrupted.",
            "Inspect, or delete, the following file:",
            "  {}".format(self.path),
        ]
        return os.linesep.join(lines)
Example #10
import json
import sys
from datetime import datetime
from pathlib import Path
from pprint import pprint  # noqa: F401
from urllib.parse import urlparse

import httpx  # pip install httpx
from platformdirs import user_cache_dir  # pip install platformdirs
from slugify import slugify  # pip install python-slugify
from termcolor import colored, cprint  # pip install termcolor

import source_finder

BASE_URL = "https://pypi.org/pypi"
CACHE_DIR = Path(user_cache_dir("source-finder"))
USER_AGENT = "source_finder.py"
VERBOSE = False
PRINT = False


def _print_verbose(*args, **kwargs):
    """Print if verbose"""
    if PRINT and VERBOSE:
        _print_stderr(*args, **kwargs)


def _print_stderr(*args, **kwargs):
    """Print to stderr"""
    if PRINT:
        print(*args, file=sys.stderr, **kwargs)
Example #11
CONNECT_TIMEOUT = 5
MAX_DELAY = 60  # Maximum time to wait for rate-limiting before aborting
REQUEST_BURST_RATE = 5
REQUESTS_PER_SECOND = 1
REQUESTS_PER_MINUTE = 60
REQUESTS_PER_DAY = 10000
REQUEST_TIMEOUT = 10
REQUEST_RETRIES = 5  # Maximum number of retries for a failed request
RETRY_BACKOFF = 0.5  # Exponential backoff factor for retries

# HTTP methods that apply to write-only dry-run mode
WRITE_HTTP_METHODS = ['PATCH', 'POST', 'PUT', 'DELETE']

# Project directories
PROJECT_DIR = abspath(dirname(dirname(__file__)))
CACHE_DIR = user_cache_dir('pyinaturalist')
DOCS_DIR = join(PROJECT_DIR, 'docs')
DOWNLOAD_DIR = join(PROJECT_DIR, 'downloads')
EXAMPLES_DIR = join(PROJECT_DIR, 'examples')
SAMPLE_DATA_DIR = join(PROJECT_DIR, 'test', 'sample_data')

# Cache settings
CACHE_EXPIRATION = {
    'api.inaturalist.org/*autocomplete': timedelta(days=30),
    'api.inaturalist.org/v*/controlled_terms*': timedelta(days=7),
    'api.inaturalist.org/v*/places*': timedelta(days=7),
    'api.inaturalist.org/v*/taxa*': timedelta(days=7),
    f'{PHOTO_CC_BASE_URL}/*': -1,
    f'{PHOTO_BASE_URL}/*': -1,
    f'{ICONIC_TAXA_BASE_URL}/*': -1,
    '*': timedelta(minutes=30),
Example #12
from pathlib import Path
from shelve import Shelf
from typing import cast

from platformdirs import user_cache_dir

from .__version__ import __version__
from .fetcher import FundInfoFetcher
from .models import FundEstimateInfo, FundIARBCInfo, FundInfo, FundNetValueInfo
from .utils.tqdm import tqdm_asyncio

__all__ = ["get_fund_infos"]

PERSISTENT_CACHE_DIR = Path(
    user_cache_dir(appname="QuickFund",
                   appauthor="MapleCCC",
                   version=__version__))
PERSISTENT_CACHE_DIR.mkdir(parents=True, exist_ok=True)


async def update_estimate_info(fund_code: str, fund_info_db: Shelf[FundInfo],
                               fund_info_fetcher: FundInfoFetcher) -> None:
    if not FundEstimateInfo.is_latest(fund_info_db[fund_code]):
        estimate_info = await fund_info_fetcher.fetch_estimate(fund_code)
        fund_info_db[fund_code].replace(estimate_info=estimate_info)


async def update_net_value_info(fund_code: str, fund_info_db: Shelf[FundInfo],
                                fund_info_fetcher: FundInfoFetcher) -> None:
    if not FundNetValueInfo.is_latest(fund_info_db[fund_code]):
        net_value_info = await fund_info_fetcher.fetch_net_value(fund_code)
Example #13
from platformdirs import user_cache_dir

from black.mode import Mode

from _black_version import version as __version__


# types
Timestamp = float
FileSize = int
CacheInfo = Tuple[Timestamp, FileSize]
Cache = Dict[str, CacheInfo]


CACHE_DIR = Path(user_cache_dir("black", version=__version__))


def read_cache(mode: Mode) -> Cache:
    """Read the cache if it exists and is well formed.

    If it is not well formed, the call to write_cache later should resolve the issue.
    """
    cache_file = get_cache_file(mode)
    if not cache_file.exists():
        return {}

    with cache_file.open("rb") as fobj:
        try:
            cache: Cache = pickle.load(fobj)
        except (pickle.UnpicklingError, ValueError):
Example #14
from modlunky2.utils import is_windows

if is_windows():
    # Import for pyinstaller to detect this module
    import platformdirs.windows  # pylint: disable=unused-import

PROGRAMS_KEY = "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall"
DEFAULT_PATH = Path("C:/Program Files (x86)/Steam/steamapps/common/Spelunky 2")
EXE_NAME = "Spel2.exe"

APP_AUTHOR = "spelunky.fyi"
APP_NAME = "modlunky2"
CONFIG_DIR = Path(user_config_dir(APP_NAME, APP_AUTHOR))
DATA_DIR = Path(user_data_dir(APP_NAME, APP_AUTHOR))
CACHE_DIR = Path(user_cache_dir(APP_NAME, APP_AUTHOR))
SHOW_PACKING_DEFAULT = False

MIN_WIDTH = 1280
MIN_HEIGHT = 768


# Sentinel for tracking unset fields
NOT_PRESENT = object()

SPELUNKY_FYI_ROOT_DEFAULT = "https://spelunky.fyi/"
LAST_INSTALL_BROWSE_DEFAULT = "/"

DEFAULT_COLOR_KEY = "#ff00ff"

logger = logging.getLogger("modlunky2")
Example #15
# -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function

import os

from platformdirs import user_cache_dir


def is_type_checking():
    try:
        from typing import TYPE_CHECKING
    except ImportError:
        return False
    return TYPE_CHECKING


REQUIREMENTSLIB_CACHE_DIR = os.getenv("REQUIREMENTSLIB_CACHE_DIR",
                                      user_cache_dir("pipenv"))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
Example #16
def compile_from_string(toolchain,
                        name,
                        source_string,
                        source_name=None,
                        cache_dir=None,
                        debug=False,
                        wait_on_error=None,
                        debug_recompile=True,
                        object=False,
                        source_is_binary=False,
                        sleep_delay=1):
    """Returns a tuple: mod_name, file_name, recompiled.
    mod_name is the name of the module represented by a compiled object,
    file_name is the name of the compiled object, which can be built from the
    source code(s) in *source_string* if necessary,
    recompiled is True if the object had to be recompiled, False if the cache
    is hit.
    Raise :exc:`CompileError` in case of error.  The mod_name and file_name
    are designed to be used with load_dynamic to load a python module from
    this object, if desired.

    Compiled code is cached in *cache_dir* and available immediately if it has
    been compiled at some point in the past.  Compiler and Python API versions
    as well as versions of include files are taken into account when examining
    the cache. If *cache_dir* is ``None``, a default location is assumed. If it
    is ``False``, no caching is performed.  Proper locking is performed on the
    cache directory.  Simultaneous use of the cache by multiple processes works
    as expected, but may lead to delays because of locking. By default, a
    process waits for 1 second before reattempting to acquire the *cache_dir*
    lock. A different waiting time can be specified through *sleep_delay*.

    The code in *source_string* will be saved to a temporary file named
    *source_name* if it needs to be compiled.

    If *debug* is ``True``, commands involved in the build are printed.

    If *wait_on_error* is ``True``, the full path name of the temporary in
    which a :exc:`CompileError` occurred is shown and the user is expected
    to press a key before the temporary file gets deleted. If *wait_on_error*
    is ``None``, it is taken to be the same as *debug*.

    If *debug_recompile*, messages are printed indicating whether a
    recompilation is taking place.

    If *source_is_binary*, the source string is a compiled object file and
    should be treated as binary for read/write purposes.
    """
    if source_name is None:
        source_name = ["module.cpp"]

    # first ensure that source strings and names are lists
    if isinstance(source_string, str) \
            or (source_is_binary and isinstance(source_string, bytes)):
        source_string = [source_string]

    if isinstance(source_name, str):
        source_name = [source_name]

    if wait_on_error is not None:
        from warnings import warn
        warn("wait_on_error is deprecated and has no effect",
             DeprecationWarning)

    import os

    if cache_dir is None:
        try:
            import platformdirs as appdirs
        except ImportError:
            import appdirs

        import sys
        cache_dir = os.path.join(
            appdirs.user_cache_dir("codepy", "codepy"),
            "codepy-compiler-cache-v5-py{}".format(".".join(
                str(i) for i in sys.version_info)))

        try:
            os.makedirs(cache_dir)
        except OSError as e:
            from errno import EEXIST
            if e.errno != EEXIST:
                raise

    def get_file_md5sum(fname):
        try:
            import hashlib
            checksum = hashlib.md5()
        except ImportError:
            # for Python < 2.5
            import md5
            checksum = md5.new()

        inf = open(fname, "rb")
        checksum.update(inf.read())

        inf.close()
        return checksum.hexdigest()

    def get_dep_structure(source_paths):
        deps = list(toolchain.get_dependencies(source_paths))
        deps.sort()
        return [(dep, os.stat(dep).st_mtime, get_file_md5sum(dep))
                for dep in deps if dep not in source_paths]

    def write_source(name):
        for i, source in enumerate(source_string):
            outf = open(name[i], "w" if not source_is_binary else "wb")
            outf.write(source)
            outf.close()

    def calculate_hex_checksum():
        try:
            import hashlib
            checksum = hashlib.md5()
        except ImportError:
            # for Python < 2.5
            import md5
            checksum = md5.new()

        for source in source_string:
            if source_is_binary:
                checksum.update(source)
            else:
                checksum.update(source.encode("utf-8"))
        checksum.update(str(toolchain.abi_id()).encode("utf-8"))
        return checksum.hexdigest()

    def load_info(info_path):
        import pickle

        try:
            info_file = open(info_path, "rb")
        except OSError:
            raise _InvalidInfoFile()

        try:
            return pickle.load(info_file)
        except EOFError:
            raise _InvalidInfoFile()
        finally:
            info_file.close()

    def check_deps(deps):
        for name, date, md5sum in deps:
            try:
                possibly_updated = os.stat(name).st_mtime != date
            except OSError as e:
                if debug_recompile:
                    logger.info(
                        "recompiling because dependency %s is "
                        "inaccessible (%s).", name, e)
                return False
            else:
                if possibly_updated and md5sum != get_file_md5sum(name):
                    if debug_recompile:
                        logger.info(
                            "recompiling because dependency %s was "
                            "updated.", name)
                    return False

        return True

    def check_source(source_path):
        valid = True
        for i, path in enumerate(source_path):
            source = source_string[i]
            try:
                src_f = open(path, "r" if not source_is_binary else "rb")
            except OSError:
                if debug_recompile:
                    logger.info(
                        "recompiling because cache directory does "
                        "not contain source file '%s'.", path)
                return False

            valid = valid and src_f.read() == source
            src_f.close()

            if not valid:
                from warnings import warn
                warn("hash collision in compiler cache")
        return valid

    cleanup_m = CleanupManager()

    try:
        # Variable 'lock_m' is used for no other purpose than
        # to keep lock manager alive.
        lock_m = CacheLockManager(cleanup_m, cache_dir, sleep_delay)  # noqa

        hex_checksum = calculate_hex_checksum()
        mod_name = f"codepy.temp.{hex_checksum}.{name}"
        if object:
            suffix = toolchain.o_ext
        else:
            suffix = toolchain.so_ext

        mod_cache_dir_m = ModuleCacheDirManager(
            cleanup_m, os.path.join(cache_dir, hex_checksum))
        info_path = mod_cache_dir_m.sub("info")
        ext_file = mod_cache_dir_m.sub(name + suffix)

        if mod_cache_dir_m.existed:
            try:
                info = load_info(info_path)
            except _InvalidInfoFile:
                mod_cache_dir_m.reset()

                if debug_recompile:
                    logger.info("recompiling for invalid cache dir (%s).",
                                mod_cache_dir_m.path)
            else:
                if check_deps(info.dependencies) and check_source(
                    [mod_cache_dir_m.sub(x) for x in info.source_name]):
                    return hex_checksum, mod_name, ext_file, False
        else:
            if debug_recompile:
                logger.info("recompiling for non-existent cache dir (%s).",
                            mod_cache_dir_m.path)

        source_paths = [mod_cache_dir_m.sub(source) for source in source_name]

        write_source(source_paths)

        if object:
            toolchain.build_object(ext_file, source_paths, debug=debug)
        else:
            toolchain.build_extension(ext_file, source_paths, debug=debug)

        if info_path is not None:
            import pickle

            info_file = open(info_path, "wb")
            pickle.dump(
                _SourceInfo(dependencies=get_dep_structure(source_paths),
                            source_name=source_name), info_file)
            info_file.close()

        return hex_checksum, mod_name, ext_file, True
    except Exception:
        cleanup_m.error_clean_up()
        raise
    finally:
        cleanup_m.clean_up()
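
The heart of the function above is keying the cache directory by a content hash of the sources plus the toolchain ABI, so a change to either forces a rebuild. A stripped-down sketch of that keying, with abi_id standing in for toolchain.abi_id():

import hashlib


def cache_key(sources, abi_id):
    """Hex digest identifying this (sources, toolchain ABI) combination."""
    checksum = hashlib.md5()
    for source in sources:
        checksum.update(source.encode("utf-8"))
    checksum.update(abi_id.encode("utf-8"))
    return checksum.hexdigest()


# Identical inputs yield the same key (a cache hit); any edit changes it.
print(cache_key(["int main() { return 0; }"], "gcc-12-x86_64"))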
Example #17
def _create_built_program_from_source_cached(ctx, src, options_bytes, devices,
                                             cache_dir, include_path):
    from os.path import join

    if cache_dir is None:
        try:
            import platformdirs as appdirs
        except ImportError:
            import appdirs

        cache_dir = join(
            appdirs.user_cache_dir("pyopencl", "pyopencl"),
            "pyopencl-compiler-cache-v2-py{}".format(".".join(
                str(i) for i in sys.version_info)))

    # {{{ ensure cache directory exists

    try:
        os.makedirs(cache_dir)
    except OSError as e:
        from errno import EEXIST
        if e.errno != EEXIST:
            raise

    # }}}

    if devices is None:
        devices = ctx.devices

    cache_keys = [
        get_cache_key(device, options_bytes, src) for device in devices
    ]

    binaries = []
    to_be_built_indices = []
    logs = []
    for i, (_device, cache_key) in enumerate(zip(devices, cache_keys)):
        cache_result = retrieve_from_cache(cache_dir, cache_key)

        if cache_result is None:
            logger.debug("build program: binary cache miss (key: %s)" %
                         cache_key)

            to_be_built_indices.append(i)
            binaries.append(None)
            logs.append(None)
        else:
            logger.debug("build program: binary cache hit (key: %s)" %
                         cache_key)

            binary, log = cache_result
            binaries.append(binary)
            logs.append(log)

    message = (75 * "=" + "\n").join(
        f"Build on {dev} succeeded, but said:\n\n{log}"
        for dev, log in zip(devices, logs) if log is not None and log.strip())

    if message:
        from pyopencl import compiler_output
        compiler_output(
            "Built kernel retrieved from cache. Original from-source "
            "build had warnings:\n" + message)

    # {{{ build on the build-needing devices, in one go

    result = None
    already_built = False
    was_cached = not to_be_built_indices

    if to_be_built_indices:
        # defeat implementation caches:
        from uuid import uuid4
        src = src + "\n\n__constant int pyopencl_defeat_cache_%s = 0;" % (
            uuid4().hex)

        logger.debug(
            "build program: start building program from source on %s" %
            ", ".join(str(devices[i]) for i in to_be_built_indices))

        prg = _cl._Program(ctx, src)
        prg.build(options_bytes, [devices[i] for i in to_be_built_indices])

        logger.debug("build program: from-source build complete")

        prg_devs = prg.get_info(_cl.program_info.DEVICES)
        prg_bins = prg.get_info(_cl.program_info.BINARIES)
        prg_logs = prg._get_build_logs()

        for dest_index in to_be_built_indices:
            dev = devices[dest_index]
            src_index = prg_devs.index(dev)
            binaries[dest_index] = prg_bins[src_index]
            _, logs[dest_index] = prg_logs[src_index]

        if len(to_be_built_indices) == len(devices):
            # Important special case: if code for all devices was built,
            # then we may simply use the program that we just built as the
            # final result.

            result = prg
            already_built = True

    if result is None:
        result = _cl._Program(ctx, devices, binaries)

    # }}}

    # {{{ save binaries to cache

    if to_be_built_indices:
        cleanup_m = CleanupManager()
        try:
            try:
                CacheLockManager(cleanup_m, cache_dir)

                for i in to_be_built_indices:
                    cache_key = cache_keys[i]
                    binary = binaries[i]

                    mod_cache_dir_m = ModuleCacheDirManager(
                        cleanup_m, join(cache_dir, cache_key))
                    info_path = mod_cache_dir_m.sub("info")
                    binary_path = mod_cache_dir_m.sub("binary")
                    source_path = mod_cache_dir_m.sub("source.cl")

                    outf = open(source_path, "wt")
                    outf.write(src)
                    outf.close()

                    outf = open(binary_path, "wb")
                    outf.write(binary)
                    outf.close()

                    from pickle import dump
                    info_file = open(info_path, "wb")
                    dump(
                        _SourceInfo(dependencies=get_dependencies(
                            src, include_path),
                                    log=logs[i]), info_file)
                    info_file.close()

            except Exception:
                cleanup_m.error_clean_up()
                raise
        finally:
            cleanup_m.clean_up()

    # }}}

    return result, already_built, was_cached
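
One detail worth noting above: appending a uniquely named dummy constant to the source defeats the OpenCL implementation's own cache, guaranteeing a genuine rebuild for the devices that need it. The trick in isolation:

from uuid import uuid4

src = "__kernel void f() {}"
# The uuid makes the source text differ on every call, so the driver
# cannot serve a stale cached binary.
src += f"\n\n__constant int pyopencl_defeat_cache_{uuid4().hex} = 0;"
print(src)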
Example #18
from platformdirs import user_cache_dir
from slugify import slugify
from termcolor import colored

try:
    # Python 3.8+
    import importlib.metadata as importlib_metadata
except ImportError:
    # Python 3.7 and lower
    import importlib_metadata

__version__ = importlib_metadata.version(__name__)

BASE_URL = "https://pypistats.org/api/"
CACHE_DIR = Path(user_cache_dir("pypistats"))
USER_AGENT = f"pypistats/{__version__}"


def _print_verbose(verbose: bool, *args, **kwargs) -> None:
    """Print if verbose"""
    if verbose:
        _print_stderr(*args, **kwargs)


def _print_stderr(*args, **kwargs) -> None:
    """Print to stderr"""
    print(*args, file=sys.stderr, **kwargs)


def _cache_filename(url: str) -> Path:
Example #19
import configparser
import logging
import pathlib

import platformdirs

try:
    import pyximport

    pyximport.install(language_level=3)
    del pyximport
except ImportError:
    pass

output_path = pathlib.Path(platformdirs.user_cache_dir("supriya", "supriya"))
if not output_path.exists():
    try:
        output_path.mkdir(parents=True, exist_ok=True)
    except IOError:
        pass

config = configparser.ConfigParser()
config.read_dict({"core": {"scsynth_path": "scsynth"}})
config_path = pathlib.Path(platformdirs.user_config_dir("supriya", "supriya"))
config_path = config_path / "supriya.cfg"
if not config_path.exists():
    try:
        config_path.parent.mkdir(parents=True, exist_ok=True)
        with config_path.open("w") as file_pointer:
            config.write(file_pointer, True)
    except IOError:
Example #20
    def cachedir(self):
        return os.environ.get('PYFR_OMP_CACHE_DIR',
                              user_cache_dir('pyfr', 'pyfr'))
Example #21
    "CPythonVersionInfo",
    "DATA_URL",
    "PyPyVersionInfo",
    "UnknownVersionError",
    "VersionDatabase",
    "VersionInfo",
]

#: The default URL from which the version database is downloaded
DATA_URL = (
    "https://raw.githubusercontent.com/jwodder/pyversion-info-data/master"
    "/pyversion-info-data.v1.json"
)

#: The default directory in which the downloaded version database is cached
CACHE_DIR = user_cache_dir("pyversion-info", "jwodder")


@dataclass
class VersionDatabase:
    """
    .. versionadded:: 1.0.0

    A database of CPython and PyPy version information.  Instances are
    constructed from JSON objects following `this JSON Schema`__.

    __ https://raw.githubusercontent.com/jwodder/pyversion-info-data/master/
       pyversion-info-data.v1.schema.json
    """

    #: The date & time when the database was last updated
Example #22
    "F": "fatal",
}
MSG_TYPES_LONG: Dict[str, str] = {v: k for k, v in MSG_TYPES.items()}

MSG_TYPES_STATUS = {"I": 0, "C": 16, "R": 8, "W": 4, "E": 2, "F": 1}

# You probably don't want to change the MAIN_CHECKER_NAME
# This would affect rcfile generation and retro-compatibility
# on all projects using [MASTER] in their rcfile.
MAIN_CHECKER_NAME = "master"

USER_HOME = os.path.expanduser("~")
# pylint: disable-next=fixme
# TODO Remove in 3.0 with all the surrounding code
OLD_DEFAULT_PYLINT_HOME = ".pylint.d"
DEFAULT_PYLINT_HOME = platformdirs.user_cache_dir("pylint")


class WarningScope:
    LINE = "line-based-msg"
    NODE = "node-based-msg"


full_version = f"""pylint {__version__}
astroid {astroid.__version__}
Python {sys.version}"""

HUMAN_READABLE_TYPES = {
    "file": "file",
    "module": "module",
    "const": "constant",
Example #23
class Config(MutableMapping[str, str]):
    """A dict-like object for configuration key and values"""

    pypi_url, verify_ssl = get_pypi_source()
    _config_map: Dict[str, ConfigItem] = {
        "cache_dir": ConfigItem(
            "The root directory of cached files",
            platformdirs.user_cache_dir("pdm"),
            True,
        ),
        "check_update": ConfigItem(
            "Check if there is any newer version available",
            True,
            True,
            coerce=ensure_boolean,
        ),
        "build_isolation": ConfigItem(
            "Isolate build environment from the project environment",
            True,
            False,
            "PDM_BUILD_ISOLATION",
            ensure_boolean,
        ),
        "global_project.fallback": ConfigItem(
            "Use the global project implicitly if no local project is found",
            False,
            True,
            coerce=ensure_boolean,
            replace="auto_global",
        ),
        "global_project.path": ConfigItem(
            "The path to the global project",
            os.path.expanduser("~/.pdm/global-project"),
            True,
        ),
        "global_project.user_site": ConfigItem(
            "Whether to install to user site", False, True, coerce=ensure_boolean
        ),
        "project_max_depth": ConfigItem(
            "The max depth to search for a project through the parents",
            5,
            True,
            env_var="PDM_PROJECT_MAX_DEPTH",
            coerce=int,
        ),
        "strategy.update": ConfigItem(
            "The default strategy for updating packages", "reuse", False
        ),
        "strategy.save": ConfigItem(
            "Specify how to save versions when a package is added", "minimum", False
        ),
        "strategy.resolve_max_rounds": ConfigItem(
            "Specify the max rounds of resolution process",
            10000,
            env_var="PDM_RESOLVE_MAX_ROUDNS",
            coerce=int,
        ),
        "install.parallel": ConfigItem(
            "Whether to perform installation and uninstallation in parallel",
            True,
            env_var="PDM_INSTALL_PARALLEL",
            coerce=ensure_boolean,
            replace="parallel_install",
        ),
        "install.cache": ConfigItem(
            "Cache wheel installation and only put symlinks in the library root",
            False,
            coerce=ensure_boolean,
            replace="feature.install_cache",
        ),
        "install.cache_method": ConfigItem(
            "`symlink` or `pth` to create links to the cached installation",
            "symlink",
            replace="feature.install_cache_method",
        ),
        "python.path": ConfigItem("The Python interpreter path", env_var="PDM_PYTHON"),
        "python.use_pyenv": ConfigItem(
            "Use the pyenv interpreter", True, coerce=ensure_boolean
        ),
        "python.use_venv": ConfigItem(
            "Install packages into the activated venv site packages instead of PEP 582",
            False,
            env_var="PDM_USE_VENV",
            coerce=ensure_boolean,
            replace="use_venv",
        ),
        "pypi.url": ConfigItem(
            "The URL of PyPI mirror, defaults to https://pypi.org/simple",
            pypi_url,
            env_var="PDM_PYPI_URL",
        ),
        "pypi.verify_ssl": ConfigItem(
            "Verify SSL certificate when query PyPI", verify_ssl, coerce=ensure_boolean
        ),
        "pypi.json_api": ConfigItem(
            "Consult PyPI's JSON API for package metadata",
            False,
            env_var="PDM_PYPI_JSON_API",
            coerce=ensure_boolean,
        ),
    }
    del pypi_url, verify_ssl

    @classmethod
    def get_defaults(cls) -> Dict[str, Any]:
        return {k: v.default for k, v in cls._config_map.items() if v.should_show()}

    @classmethod
    def add_config(cls, name: str, item: ConfigItem) -> None:
        """Add or modify a config item"""
        cls._config_map[name] = item

    def __init__(self, config_file: Path, is_global: bool = False):
        self._data = {}
        if is_global:
            self._data.update(self.get_defaults())

        self.is_global = is_global
        self.config_file = config_file.resolve()
        self._file_data = load_config(self.config_file)
        self.deprecated = {
            v.replace: k for k, v in self._config_map.items() if v.replace
        }
        self._data.update(self._file_data)

    def _save_config(self) -> None:
        """Save the changed to config file."""
        self.config_file.parent.mkdir(parents=True, exist_ok=True)
        toml_data: Dict[str, Any] = {}
        for key, value in self._file_data.items():
            *parts, last = key.split(".")
            temp = toml_data
            for part in parts:
                if part not in temp:
                    temp[part] = {}
                temp = temp[part]
            temp[last] = value

        with self.config_file.open("w", encoding="utf-8") as fp:
            tomlkit.dump(toml_data, fp)  # type: ignore

    def __getitem__(self, key: str) -> Any:
        if key not in self._config_map and key not in self.deprecated:
            raise NoConfigError(key)
        config_key = self.deprecated.get(key, key)
        config = self._config_map[config_key]
        env_var = config.env_var
        if env_var is not None and env_var in os.environ:
            result = os.environ[env_var]
        else:
            if config_key in self._data:
                result = self._data[config_key]
            elif config.replace:
                result = self._data[config.replace]
            else:
                raise NoConfigError(key) from None
        return config.coerce(result)

    def __setitem__(self, key: str, value: Any) -> None:
        if key not in self._config_map and key not in self.deprecated:
            raise NoConfigError(key)
        config_key = self.deprecated.get(key, key)
        config = self._config_map[config_key]
        if not self.is_global and config.global_only:
            raise ValueError(
                f"Config item '{key}' is not allowed to set in project config."
            )

        value = config.coerce(value)
        env_var = config.env_var
        if env_var is not None and env_var in os.environ:
            click.echo(
                termui.yellow(
                    "WARNING: the config is shadowed by env var '{}', "
                    "the value set won't take effect.".format(env_var)
                )
            )
        self._data[config_key] = value
        self._file_data[config_key] = value
        if config.replace:
            self._data.pop(config.replace, None)
            self._file_data.pop(config.replace, None)
        self._save_config()

    def __len__(self) -> int:
        return len(self._data)

    def __iter__(self) -> Iterator[str]:
        keys: Set[str] = set()
        for key in self._data:
            if key in self._config_map:
                keys.add(key)
            elif key in self.deprecated:
                keys.add(self.deprecated[key])
        return iter(keys)

    def __delitem__(self, key: str) -> None:
        config_key = self.deprecated.get(key, key)
        config = self._config_map[config_key]
        self._data.pop(config_key, None)
        self._file_data.pop(config_key, None)
        if self.is_global and config.should_show():
            self._data[config_key] = config.default
        if config.replace:
            self._data.pop(config.replace, None)
            self._file_data.pop(config.replace, None)

        env_var = config.env_var
        if env_var is not None and env_var in os.environ:
            click.echo(
                termui.yellow(
                    "WARNING: the config is shadowed by env var '{}', "
                    "set value won't take effect.".format(env_var)
                )
            )
        self._save_config()
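
The lookup order implemented by __getitem__ above is: environment variable, then a stored value, then the deprecated key, with defaults pre-merged for global configs. A compact sketch of that precedence, using hypothetical names:

import os


def lookup(key, env_var, data, defaults):
    """Resolve a config value: environment variable > stored value > default."""
    if env_var and env_var in os.environ:
        return os.environ[env_var]
    if key in data:
        return data[key]
    return defaults[key]  # a missing key here mirrors pdm's NoConfigError


print(lookup("cache_dir", "MYAPP_CACHE_DIR", {}, {"cache_dir": "/tmp/cache"}))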
Example #24
import tempfile
from typing import Dict, Iterable, Set, Tuple

from platformdirs import user_cache_dir

from black.mode import Mode

from _black_version import version as __version__

# types
Timestamp = float
FileSize = int
CacheInfo = Tuple[Timestamp, FileSize]
Cache = Dict[str, CacheInfo]

CACHE_DIR = Path(user_cache_dir("tan", version=__version__))


def read_cache(mode: Mode) -> Cache:
    """Read the cache if it exists and is well formed.

    If it is not well formed, the call to write_cache later should resolve the issue.
    """
    cache_file = get_cache_file(mode)
    if not cache_file.exists():
        return {}

    with cache_file.open("rb") as fobj:
        try:
            cache: Cache = pickle.load(fobj)
        except (pickle.UnpicklingError, ValueError, IndexError):