def load_toml(path):
    """Parse the TOML file at *path*, handling old/new tomli file-mode APIs.

    Newer tomli versions require a binary file object; older ones took text.
    """
    # Bug fix: compare numeric version components, not strings — lexicographic
    # string comparison wrongly treats e.g. "1.10.0" as older than "1.2.0".
    # NOTE(review): assumes a plain X.Y.Z version string (no pre-release tag).
    version = tuple(int(part) for part in tomli.__version__.split(".")[:3])
    if version < (1, 2, 0):
        with open(path, "r") as f:
            return tomli.load(f)
    else:
        with open(path, "rb") as f:
            return tomli.load(f)
def check_fingerprint(project: Project, filename: PathLike) -> bool:
    """Return True when *filename* is NOT valid TOML.

    A file that fails to parse as TOML is assumed to be a requirements.txt
    document; valid TOML belongs to some other converter.
    """
    import tomli

    with open(filename, "rb") as fp:
        try:
            tomli.load(fp)
        except tomli.TOMLDecodeError:
            # Consistency fix: catch the precise parser error (as the sibling
            # check_fingerprint functions do) instead of any ValueError.
            # the file should be a requirements.txt if it not a TOML document.
            return True
        else:
            return False
def convert(
    project: Project, filename: PathLike, options: Namespace | None
) -> tuple[dict[str, Any], dict[str, Any]]:
    """Convert a Pipfile at *filename* into (metadata, settings) dicts."""
    with open(filename, "rb") as fp:
        data = tomli.load(fp)
    result = {}
    settings = {}
    if "pipenv" in data:
        settings["allow_prereleases"] = data["pipenv"].get("allow_prereleases", False)
    if "requires" in data:
        python_version = data["requires"].get("python_full_version") or data[
            "requires"
        ].get("python_version")
        # Bug fix: a [requires] table without any python version key used to
        # produce the bogus constraint ">=None"; skip it instead.
        if python_version:
            result["requires-python"] = f">={python_version}"
    if "source" in data:
        settings["source"] = data["source"]
    result["dependencies"] = make_array(  # type: ignore
        [
            convert_pipfile_requirement(k, req)
            for k, req in data.get("packages", {}).items()
        ],
        True,
    )
    settings["dev-dependencies"] = {
        "dev": make_array(
            [
                convert_pipfile_requirement(k, req)
                for k, req in data.get("dev-packages", {}).items()
            ],
            True,
        )
    }
    return result, settings
def search_pyodide_root(curdir: str | Path, *, max_depth: int = 5) -> Path:
    """
    Recursively search for the root of the Pyodide repository,
    by looking for the pyproject.toml file in the parent directories
    which contains [tool.pyodide] section.
    """
    # We want to include "curdir" in parent_dirs, so add a garbage suffix
    parent_dirs = (Path(curdir) / "garbage").parents[:max_depth]

    for base in parent_dirs:
        pyproject_file = base / "pyproject.toml"
        if not pyproject_file.is_file():
            continue
        try:
            with pyproject_file.open("rb") as f:
                configs = tomli.load(f)
        except tomli.TOMLDecodeError as exc:
            # Chain the decode error so the traceback keeps the parser's
            # location information (the original dropped it).
            raise ValueError(f"Could not parse {pyproject_file}.") from exc
        if "tool" in configs and "pyodide" in configs["tool"]:
            return base

    raise FileNotFoundError(
        "Could not find Pyodide root directory. If you are not in the Pyodide directory, set `PYODIDE_ROOT=<pyodide-root-directory>`."
    )
def ensure_config(quiet: bool) -> Config:
    """Ensure the config file and default packages file exist, then load the config.

    Missing files are created from the bundled defaults; progress messages are
    suppressed when *quiet* is true.
    """
    config_dir = Path(click.get_app_dir(APP_NAME))
    if not config_dir.exists():
        config_dir.mkdir(parents=True)

    config_file = config_dir / CONFIG_NAME
    default_packages_file = Path.home() / DEFAULT_PACKAGES_NAME

    if not config_file.is_file():
        if not quiet:
            click.secho("Config file not found...", fg="bright_black")
            click.secho(f"Creating default config {config_file}", fg="bright_black")
        copyfile(Path(__file__).parent / DEFAULTS_PATH / CONFIG_NAME, config_file)

    with config_file.open("rb") as fh:
        config = tomli.load(fh)

    if not expand_path(config["packages_location"]).is_file():
        if not quiet:
            click.secho("Packages file not found...", fg="bright_black")
            click.secho(f"Creating default {default_packages_file}", fg="bright_black")
            click.secho("You can change it later in the config.", fg="bright_black")
        copyfile(
            Path(__file__).parent / DEFAULTS_PATH / DEFAULT_PACKAGES_NAME,
            default_packages_file,
        )

    # Transform sections
    sections = {
        section["name"]: Section(**section) for section in config.pop("sections")
    }
    return Config(**config, sections=sections)
def load_config_file(self, path: Union[str, pathlib.Path]) -> None:
    """
    Load a configuration file in one of the supported formats, and merge it
    with the current config if it exists.

    Parameters
    ----------
    path: Union[str, pathlib.Path]
        config file to load. [yaml, toml, json, py] formats are supported
    """
    path = pathlib.Path(path)
    suffix = path.suffix

    if suffix in (".yaml", ".yml"):
        # do our own YAML loading
        with open(path, "r") as infile:
            self.update_config(Config(yaml.safe_load(infile)))
    elif suffix == ".toml" and HAS_TOML:
        with open(path, "rb") as infile:
            self.update_config(Config(toml.load(infile)))
    else:
        # fall back to traitlets.config.Application's implementation
        super().load_config_file(str(path))

    Provenance().add_input_file(path, role="Tool Configuration")
def get_requires_python_str(package_dir: Path) -> Optional[str]:
    """Return the python requires string from the most canonical source available, or None"""
    # 1) pyproject.toml: project.requires-python
    try:
        with (package_dir / "pyproject.toml").open("rb") as pyproject_file:
            return str(tomli.load(pyproject_file)["project"]["requires-python"])
    except (FileNotFoundError, KeyError, IndexError, TypeError):
        pass

    # 2) setup.cfg: options.python_requires
    try:
        parser = ConfigParser()
        parser.read(package_dir / "setup.cfg")
        return str(parser["options"]["python_requires"])
    except (FileNotFoundError, KeyError, IndexError, TypeError):
        pass

    # 3) setup.py: static scan for a python_requires argument
    try:
        with (package_dir / "setup.py").open(encoding="utf8") as setup_file:
            return setup_py_python_requires(setup_file.read())
    except FileNotFoundError:
        pass

    return None
def test_defaults(platform, intercepted_build_args):
    """Check that a plain `main()` run picks up the shipped defaults.toml values."""
    main()

    build_options: BuildOptions = intercepted_build_args.args[0].build_options(
        identifier=None
    )

    with (resources_dir / "defaults.toml").open("rb") as f:
        defaults_toml = tomli.load(f)

    # Platform-specific entries override the root [tool.cibuildwheel] table.
    cibw_defaults = defaults_toml["tool"]["cibuildwheel"]
    defaults = {**cibw_defaults, **cibw_defaults[platform]}

    # test a few options
    assert build_options.before_all == defaults["before-all"]

    expected_repair = defaults["repair-wheel-command"]
    if isinstance(expected_repair, list):
        expected_repair = " && ".join(expected_repair)
    assert build_options.repair_command == expected_repair
    assert build_options.build_frontend == defaults["build-frontend"]

    if platform == "linux":
        assert build_options.manylinux_images
        pinned_images = _get_pinned_docker_images()
        expected_image = pinned_images["x86_64"][defaults["manylinux-x86_64-image"]]
        assert build_options.manylinux_images["x86_64"] == expected_image
def read_toml_configuration_settings(settings: BaseSettings) -> Dict[str, Any]:
    """Read the configuration file(s)

    Parameters
    ----------
    settings: BaseSettings
        A BaseSettings instance

    Returns
    -------
    Dict[str, Any]
        A dict containing the data from the read out configuration file(s)
    """
    output_dict: Dict[str, Any] = {}
    config_files: List[Path] = []
    if defaults.SETTINGS_LOCATION.exists():
        config_files += [defaults.SETTINGS_LOCATION]
    if defaults.SETTINGS_OVERRIDE_LOCATION.exists():
        config_files += sorted(defaults.SETTINGS_OVERRIDE_LOCATION.glob("*.toml"))
    for config_file in config_files:
        with open(config_file, "rb") as file:
            # Bug fix: the original used `output_dict | tomli.load(file)`,
            # which built the merged dict and discarded it, so no file was
            # ever read into the result. `|=` merges in place, letting later
            # override files win over earlier settings.
            output_dict |= tomli.load(file)
    return output_dict
def test_init():
    """Drive the interactive initialiser and verify pyproject.toml + LICENSE output."""
    answers = [
        'foo',  # Module name
        'Test Author',  # Author
        '*****@*****.**',  # Author email
        'http://example.com/',  # Home page
        '1'  # License (1 -> MIT)
    ]
    with TemporaryDirectory() as td:
        with patch_data_dir(), faking_input(answers):
            init.TerminalIniter(td).initialise()

        generated = Path(td) / 'pyproject.toml'
        assert_isfile(generated)
        with generated.open('rb') as f:
            data = tomli.load(f)
        assert data['project']['authors'][0]['email'] == "*****@*****.**"

        license_file = Path(td) / 'LICENSE'
        assert_isfile(license_file)
        with license_file.open() as f:
            license_text = f.read()
        assert license_text.startswith("The MIT License (MIT)")
        assert "{year}" not in license_text
        assert "Test Author" in license_text
def __init__(self, src_dir: str | Path, environment: Environment) -> None:
    """If isolated is True(default), the builder will set up a *clean* environment.
    Otherwise, the environment of the host Python will be used.
    """
    self._env = environment
    self.executable = self._env.interpreter.executable.as_posix()
    self.src_dir = src_dir
    self.isolated = environment.project.config["build_isolation"]
    logger.debug("Preparing isolated env for PEP 517 build...")

    pyproject_path = os.path.join(src_dir, "pyproject.toml")
    try:
        with open(pyproject_path, "rb") as f:
            spec = tomli.load(f)
    except FileNotFoundError:
        # No pyproject.toml: fall back entirely to the default backend.
        spec = {}
    except Exception as e:
        raise BuildError(e) from e

    build_system = spec.get("build-system", self.DEFAULT_BACKEND)
    build_system.setdefault(
        "build-backend", self.DEFAULT_BACKEND["build-backend"]
    )
    if "requires" not in build_system:
        raise BuildError("Missing 'build-system.requires' in pyproject.toml")
    self.init_build_system(build_system)
def check(source_dir):
    """Check that *source_dir* declares a valid PEP 517 build system and can
    build both an sdist and a wheel. Returns True only if everything passes."""
    pyproject = pjoin(source_dir, 'pyproject.toml')
    if isfile(pyproject):
        log.info('Found pyproject.toml')
    else:
        log.error('Missing pyproject.toml')
        return False

    try:
        with open(pyproject, 'rb') as f:
            pyproject_data = tomli.load(f)
        # Ensure the mandatory data can be loaded
        buildsys = pyproject_data['build-system']
        requires = buildsys['requires']
        backend = buildsys['build-backend']
        backend_path = buildsys.get('backend-path')
        log.info('Loaded pyproject.toml')
    except (tomli.TOMLDecodeError, KeyError):
        log.error("Invalid pyproject.toml", exc_info=True)
        return False

    hooks = Pep517HookCaller(source_dir, backend, backend_path)

    sdist_ok = check_build_sdist(hooks, requires)
    wheel_ok = check_build_wheel(hooks, requires)

    if not sdist_ok:
        log.warning('Sdist checks failed; scroll up to see')
    if not wheel_ok:
        log.warning('Wheel checks failed')

    # Bug fix: the original returned only sdist_ok, so wheel-build failures
    # were logged but never caused the check to fail.
    return sdist_ok and wheel_ok
def convert(
    project: Project, filename: Path, options: Optional[Namespace]
) -> Tuple[Mapping[str, Any], Mapping[str, Any]]:
    """Read the [tool.pdm] table from *filename* and convert its legacy metadata."""
    with open(filename, "rb") as fp:
        tool_pdm = tomli.load(fp)["tool"]["pdm"]
    converter = LegacyMetaConverter(tool_pdm, project.core.ui)
    return converter.convert()
def test_init_readme_found_yes_choosen():
    """An existing readme.md should be picked up and recorded in pyproject.toml."""
    answers = [
        'test_module_name',
        'Test Author',
        '*****@*****.**',
        '',  # Home page omitted
        '4',  # Skip - choose a license later
    ]
    with make_dir(["readme.md"]) as td:
        with patch_data_dir(), faking_input(answers):
            init.TerminalIniter(td).initialise()

        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomli.load(f)

    expected = {
        'authors': [{'name': 'Test Author', 'email': '*****@*****.**'}],
        'name': 'test_module_name',
        'readme': 'readme.md',
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected
def read_python_configs(config: PlatformName) -> List[Dict[str, str]]:
    """Return the python_configurations entries for *config* from build-platforms.toml."""
    platforms_file = resources_dir / "build-platforms.toml"
    with platforms_file.open("rb") as f:
        platform_data = tomli.load(f)
    return list(platform_data[config]["python_configurations"])
def test_author_email_field_is_optional():
    """Skipping the email prompt should yield an authors entry with only a name."""
    answers = [
        'test_module_name',
        'Test Author',
        '',  # Author-email field is skipped
        'https://www.example.org',
        '4',
    ]
    with TemporaryDirectory() as td:
        with patch_data_dir(), faking_input(answers):
            init.TerminalIniter(td).initialise()

        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomli.load(f)
        assert not Path(td, 'LICENSE').exists()

    expected = {
        'authors': [{'name': 'Test Author'}],
        'name': 'test_module_name',
        'urls': {'Home': 'https://www.example.org'},
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected
def test_init_homepage_validator():
    """A rejected home-page URL should be re-prompted until a valid one is given."""
    answers = [
        'test_module_name',
        'Test Author',
        '*****@*****.**',
        'www.uh-oh-spagghetti-o.com',  # fails validation
        'https://www.example.org',  # passes
        '4',  # Skip - choose a license later
    ]
    with TemporaryDirectory() as td:
        with patch_data_dir(), faking_input(answers):
            init.TerminalIniter(td).initialise()

        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomli.load(f)

    expected = {
        'authors': [{'name': 'Test Author', 'email': '*****@*****.**'}],
        'name': 'test_module_name',
        'urls': {'Home': 'https://www.example.org'},
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected
def parse_config(config_files):
    """Load each TOML file in *config_files* into the global conf_values,
    warn about unknown keys, then post-process option messages and
    %-style variable substitutions."""
    for config_file in config_files:
        log.notice('Loading configuration file: %s', config_file)
        try:
            with open(config_file, 'rb') as f:
                config = tomli.load(f)
            for key in config:
                if key not in valid_config_file_values:
                    log.critical("Unknown option '%s' in config file %s",
                                 key, config_file)
            conf_values.update(config)
        except Exception as e:
            # Message fix: the original read "Could not find parse
            # configuration file" — two verbs mashed together.
            log.critical('Could not find or parse configuration file: %s: %s',
                         config_file, e)

    # print out any options messages
    for opt in conf_values['options']:
        if opt in option_messages:
            log.info(option_messages[opt])

    if "envscript" in conf_values:
        log.info('Envscript support enabled.')

    # take care of any variable substitutions that may be left
    for x in list(conf_values):
        if isinstance(conf_values[x], str):
            conf_values[x] = conf_values[x] % conf_values
def __init__(self, src_dir: str | Path, environment: Environment) -> None:
    """Prepare an isolated PEP 517 build environment for *src_dir*."""
    self._env = environment
    self.executable = self._env.interpreter.executable
    self.src_dir = src_dir
    logger.debug("Preparing isolated env for PEP 517 build...")

    pyproject_path = os.path.join(src_dir, "pyproject.toml")
    try:
        with open(pyproject_path, "rb") as f:
            spec = tomli.load(f)
    except FileNotFoundError:
        # No pyproject.toml: fall back entirely to the default backend.
        spec = {}
    except Exception as e:
        raise BuildError(e) from e

    build_system = spec.get("build-system", self.DEFAULT_BACKEND)
    build_system.setdefault(
        "build-backend", self.DEFAULT_BACKEND["build-backend"]
    )
    if "requires" not in build_system:
        raise BuildError("Missing 'build-system.requires' in pyproject.toml")
    self.init_build_system(build_system)

    self._prefix = _Prefix(
        self.executable,
        shared=self.get_shared_env(hash(frozenset(self._requires))),
        overlay=self.get_overlay_env(
            os.path.normcase(self.src_dir).rstrip("\\/")
        ),
    )
def main():
    """Load conf.toml and print its contents, reporting invalid TOML."""
    with open("conf.toml", "rb") as f:
        try:
            toml_dict = tomli.load(f)
        except tomli.TOMLDecodeError:
            print("TOML File is not valid")
            # Bug fix: the original fell through to print(toml_dict) after a
            # decode error, raising NameError because the name was never bound.
            return
    print(toml_dict)
def load_from_file(self) -> None:
    """Load new settings from the file path of the settings object."""
    log.info(f"Loading new settings from {self.path}.")

    loaded: RecursiveDefaultDict[str, t.Any] = RecursiveDefaultDict()
    with self.path.open("rb") as settings_file:
        raw_settings = tomli.load(settings_file)
    loaded.update_from_dict_recursive(raw_settings)

    self._settings_dict = loaded
def convert(
    project: Optional[Project], filename: PathLike, options: Optional[Namespace]
) -> Tuple[Mapping, Mapping]:
    """Convert the [tool.flit] table in *filename*, working from its directory."""
    target_dir = os.path.dirname(os.path.abspath(filename))
    # Stay chdir'd into the project directory for the whole conversion so
    # relative paths in the flit metadata resolve correctly.
    with open(filename, "rb") as fp, cd(target_dir):
        flit_table = tomli.load(fp)["tool"]["flit"]
        ui = project.core.ui if project else None
        return FlitMetaConverter(flit_table, ui).convert()
def load_system(source_dir):
    """
    Load the build system from a source dir (pyproject.toml).
    """
    pyproject_path = os.path.join(source_dir, 'pyproject.toml')
    with open(pyproject_path, 'rb') as f:
        return tomli.load(f)['build-system']
def check_fingerprint(project: Project | None, filename: Path | str) -> bool:
    """Return True when *filename* is valid TOML containing a [tool.poetry] table."""
    with open(filename, "rb") as fp:
        try:
            data = tomli.load(fp)
        except tomli.TOMLDecodeError:
            # Not TOML at all — some other converter should handle it.
            return False
    return "poetry" in data.get("tool", {})
def check_fingerprint(project: Optional[Project], filename: PathLike) -> bool:
    """Return True when *filename* is valid TOML containing a [tool.flit] table."""
    with open(filename, "rb") as fp:
        try:
            data = tomli.load(fp)
        except tomli.TOMLDecodeError:
            # Not TOML at all — some other converter should handle it.
            return False
    return "flit" in data.get("tool", {})
def load(cls, toml_path: Path):
    """Build an instance from *toml_path*, dropping cases marked include = false."""
    with toml_path.open("rb") as f:
        raw = tomli.load(f)

    cases = {}
    for uuid, opts in raw.items():
        # Only an explicit `include = false` excludes a case.
        if opts.get('include', None) is False:
            continue
        cases[uuid] = TestCaseTOML(uuid, *opts)
    return cls(cases)
def check_fingerprint(project: Project, filename: PathLike) -> bool:
    """Return True when *filename* is valid TOML with [tool.pdm] dependencies."""
    with open(filename, "rb") as fp:
        try:
            data = tomli.load(fp)
        except tomli.TOMLDecodeError:
            # Not TOML at all — some other converter should handle it.
            return False

    if "tool" not in data or "pdm" not in data["tool"]:
        return False
    return "dependencies" in data["tool"]["pdm"]
def read_typeshed_stub_metadata(stub_path: Path) -> StubInfo:
    """Parse METADATA.toml inside a typeshed stub directory into a StubInfo."""
    metadata_file = stub_path / "METADATA.toml"
    with metadata_file.open("rb") as f:
        meta = tomli.load(f)

    # A stub is obsolete as soon as the metadata records when it became so.
    is_obsolete = "obsolete_since" in meta
    return StubInfo(
        distribution=stub_path.name,
        version_spec=meta["version"],
        obsolete=is_obsolete,
        no_longer_updated=meta.get("no_longer_updated", False),
    )
def get_hooks(pkg, backend=None, path=None):
    """Build a Pep517HookCaller for sample package *pkg*.

    Backend and backend-path default to the values declared in the sample's
    pyproject.toml when not given explicitly.
    """
    source_dir = pjoin(SAMPLES_DIR, pkg)
    with open(pjoin(source_dir, 'pyproject.toml'), 'rb') as f:
        data = tomli.load(f)

    # Index ['build-system'] lazily so an explicit backend/path never
    # requires the table to exist.
    if backend is None:
        backend = data['build-system']['build-backend']
    if path is None:
        path = data['build-system']['backend-path']
    return Pep517HookCaller(source_dir, backend, path)
def load_file(
    file: str, *,
    use_keymanager: bool,
) -> Tuple[Entries, Options]:
    """Parse a version configuration file.

    Returns the per-package entries and the global Options extracted from the
    optional __config__ section. Paths in __config__ are expanded and resolved
    relative to the file's directory; the keyfile is honoured only when
    *use_keymanager* is true.
    """
    try:
        with open(file, 'rb') as f:
            config = tomli.load(f)
    except (OSError, tomli.TOMLDecodeError) as e:
        raise FileLoadError('version configuration file', file, e)

    ver_files: Optional[Tuple[Path, Path]] = None
    keymanager = KeyManager(None)
    source_configs = {}

    # Defaults apply when there is no __config__ section.
    max_concurrency = 20
    proxy = None
    httplib = None
    http_timeout = 20

    if '__config__' in config:
        c = config.pop('__config__')
        base_dir = Path(file).parent

        if 'oldver' in c and 'newver' in c:
            oldver = base_dir / os.path.expandvars(
                os.path.expanduser(c.get('oldver')))
            newver = base_dir / os.path.expandvars(
                os.path.expanduser(c.get('newver')))
            ver_files = oldver, newver

        if use_keymanager:
            keyfile = c.get('keyfile')
            if keyfile:
                keyfile = base_dir / os.path.expandvars(
                    os.path.expanduser(c.get('keyfile')))
                keymanager = KeyManager(keyfile)

        if 'source' in c:
            source_configs = c['source']

        max_concurrency = c.get('max_concurrency', 20)
        proxy = c.get('proxy')
        httplib = c.get('httplib', None)
        http_timeout = c.get('http_timeout', 20)

    return cast(Entries, config), Options(
        ver_files, max_concurrency, proxy, keymanager,
        source_configs, httplib, http_timeout,
    )