def split_crate(parent_dir, crate_file):
    """Split an old-style merged manifest file into per-release manifests.

    Args:
        parent_dir: Directory under which the per-crate subdirectory is created.
        crate_file: os.DirEntry that points to an old-style merged manifests file.

    Raises:
        rtoml.TomlParsingError: If the merged manifest is not valid TOML.
    """
    print(f" Splitting {crate_file.path}...")
    try:
        crates = rtoml.load(Path(crate_file.path))
        # Crate name is the filename stem, e.g. "serde.toml" -> "serde"
        name = crate_file.name.split('.')[0]
        crate_dirname = os.path.join(parent_dir, name)
        if not os.path.isdir(crate_dirname):
            os.mkdir(crate_dirname)
        general = crates.pop("general")
        if "external" in crates:
            write_externals(crate_dirname, name, general, crates.pop("external"))
        # remaining top-level tables must be versions (general was already pop'd)
        for version in crates:
            write_release(crate_dirname, name, general, fix_ver(version),
                          crates[version])
        # Clean up unneeded merged manifest (DirEntry is os.PathLike)
        os.remove(crate_file)
    except rtoml.TomlParsingError as ex:
        # Fix: report the path, not the DirEntry repr, matching the message above
        print(f"FAILED to migrate {crate_file.path}: " + str(ex))
        raise
def _specifies_build_tool(self, build_tool: str) -> bool:
    """
    Generalised method to check for a particular build tool in
    `pyproject.toml`.

    Does more than a naive search for `build_tool`, actually checks the
    appropriate toml construction so if this method returns True, caller
    can be confident that `pyproject.toml` is valid.

    Args:
        build_tool (str): The build tool to check for e.g. `flit`, `poetry`.

    Returns:
        bool: True if `pyproject.toml` specifies that build tool, else False.
    """
    # If it doesn't have a pyproject.toml, bail early
    if not self.has_pyproject_toml():
        return False
    with open(self.local_path.joinpath("pyproject.toml")) as file:
        toml = rtoml.load(file)
    if build_system := toml.get("build-system"):
        if build_backend := build_system.get("build-backend"):
            return build_tool in build_backend.strip().lower()
    # Fix: previously fell off the end returning None when [build-system] or
    # build-backend was missing; honour the declared -> bool contract.
    return False
def fix_manifest(file):
    """Rewrite a manifest so origin info is grouped under one 'origin' table.

    Moves 'origin' (url, optional '@commit' suffix), 'origin-hashes' and
    'archive-name' into a single nested 'origin' table, then rewrites the
    manifest in place.

    Args:
        file: Absolute path to the manifest to patch.

    Raises:
        rtoml.TomlParsingError: If the manifest fails to parse.
    """
    print(f" Patching {file}...")
    try:
        manifest = rtoml.load(Path(file))
    except rtoml.TomlParsingError as ex:
        print(f"FAILED to patch {file}: " + str(ex))
        raise
    if "origin" not in manifest:
        print(f" WARNING: manifest without origin? {file}")
        return
    origin = {}
    url = fix_url(manifest.pop("origin"))
    # Move any '@commit' info to a separate key
    origin["url"] = url.split('@')[0]
    if '@' in url:
        origin["commit"] = url.split('@')[1]
    if "origin-hashes" in manifest:
        origin["hashes"] = manifest.pop("origin-hashes")
    if "archive-name" in manifest:
        origin["archive-name"] = manifest.pop("archive-name")
    manifest["origin"] = origin
    # Rewrite patched manifest.
    # Fix: don't shadow the 'file' parameter with the open handle.
    with open(file, "wt") as out:
        rtoml.dump(utils.fix_manifest_order(manifest), out)
def main(reservoir, input_name, delays, alphas, plant_number):
    """Run grid_analysis for one reservoir/input pair and save the result CSV."""
    variable_info = toml.load(open("variables.toml"))
    analysis_dir = "data/Vegetative_stages/analysis-{}plants/".format(
        plant_number)
    if not os.path.isdir(analysis_dir):
        os.makedirs(analysis_dir)

    categories = reservoir.split("+")
    # Variables whose category belongs to the requested reservoir
    matching = [key for key in variable_info
                if variable_info[key]["category"] in categories]
    # Output columns derived from any matching variable
    selected_variables = [col for var in matching for col in output_names
                          if (var + "_") in col]

    if len(selected_variables) < 10:
        print(
            f"Not enough variables for category {reservoir} (only {len(selected_variables)} variables)."
        )
        return

    X = df.loc[:, selected_variables]
    y = df[input_name].values
    print(reservoir, input_name)
    analysis_data = grid_analysis(delays=delays, alphas=alphas, X=X, y=y)
    print("Saving CSV file.")
    analysis_data.to_csv(
        os.path.join(analysis_dir, f"{reservoir}-{input_name}.csv"))
def load(name: str) -> Corpus:
    """Instantiate the corpus stored under ``name`` from its corpus.toml."""
    toml_path = os.path.join(constants.corpora_path, name, "corpus.toml")
    with open(toml_path) as toml_file:
        corpus_dict = toml.load(toml_file)
    registry = {cls.corpus_type: cls for cls in (RedditCorpus,)}  # type: ignore
    corpus_cls = registry[corpus_dict.pop("type")]
    return corpus_cls.from_dict({"name": name, **corpus_dict})
def test_to_toml_file(tmp_path):
    """Round-trip a Diot through to_toml and check the written TOML content."""
    import rtoml

    box = Diot(**test_dict)
    out_file = tmp_path / 'diot_test_to_toml.toml'
    box.to_toml(out_file)
    with open(out_file) as fh:
        loaded = rtoml.load(fh)
    # Everything except the nested key should survive the TOML round trip
    expected = {k: v for k, v in test_dict.items() if k != 'diot_nest'}
    assert loaded == expected
def __init__(self, *args: Any, **kwargs: Any):
    """Build the main application window and load all persisted state.

    Loads every report, corpus and lexicon from disk into Observable
    containers, restores settings from settings.toml (falling back to empty
    Reddit credentials), and wires settings persistence plus the root view.
    """
    tk.Tk.__init__(self, *args, **kwargs)
    self.title("PogNLP")
    # Thread helper bound to this Tk instance
    self.tkt = TkThread(self)
    # Single stretchable cell for the root view
    self.grid_columnconfigure(0, weight=1)
    self.grid_rowconfigure(0, weight=1)
    # Name of the currently displayed frame
    self.current_frame = util.Observable[str]("HomeView")
    # Load every saved report/corpus/lexicon from disk, keyed by name
    self.reports = util.Observable[Dict[str, Report]]({
        report_name: Report.load(report_name)
        for report_name in Report.ls()
    })
    self.corpora = util.Observable[Dict[str, Corpus]]({
        corpus_name: Corpus.load(corpus_name)
        for corpus_name in Corpus.ls()
    })
    self.lexica = util.Observable[Dict[str, Union[
        Lexicon, DefaultLexicon]]]({
            lexicon_name: Lexicon.load(lexicon_name)
            for lexicon_name in Lexicon.ls()
        })
    # Currently selected report/lexicon (None = no selection)
    self.current_report = util.Observable[Optional[str]](None)
    self.current_lexicon = util.Observable[Optional[str]](None)

    # Try to load last-used Reddit API credentials from settings.toml
    try:
        with open(constants.settings_path, "r",
                  encoding="utf-8") as settings_file:
            settings = toml.load(settings_file)
    except (FileNotFoundError, toml.TomlParsingError):
        # Missing or corrupt settings file: start with empty credentials
        settings = {
            "REDDIT_CLIENT_ID": None,
            "REDDIT_CLIENT_SECRET": None,
        }
    self.settings = util.Observable[Dict[str, Any]](settings)

    # Write new settings.toml to disk whenever settings is updated
    def persist_settings(settings: Dict[str, Any]) -> None:
        with open(constants.settings_path, "w",
                  encoding="utf-8") as settings_file:
            toml.dump(settings, settings_file)

    # call=False: don't rewrite the file with the values we just loaded
    self.settings.subscribe(persist_settings, call=False)
    view = AppView(self, self, current_frame=self.current_frame)
    view.grid(row=0, column=0, sticky="nesw")
    # Route uncaught Tk callback exceptions to our own error display.
    # NOTE(review): this assigns on the tk.Tk class, affecting all Tk
    # instances, not just this one — confirm that is intended.
    tk.Tk.report_callback_exception = self.show_error
def load(name: str) -> Corpus:
    """Load a corpus from disk given its name"""
    toml_path = os.path.join(constants.corpora_path, name, "corpus.toml")
    with open(toml_path, encoding="utf-8") as toml_file:
        corpus_dict = toml.load(toml_file)
    # Map each known corpus class by its declared type tag
    type_registry = {cls.corpus_type: cls for cls in (RedditCorpus, )}
    corpus_class = type_registry[corpus_dict.pop("type")]
    return corpus_class.from_dict({"name": name, **corpus_dict})
def load_config(path: Path) -> Config:
    """Load configuration from file."""
    data = rtoml.load(path)
    # Each section is extracted by its dedicated helper
    return Config(
        log_level=_get_log_level(data),
        http=_get_http_config(data),
        irc=_get_irc_config(data),
    )
async def get_current_versions() -> dict[str, str]:
    """
    Extracts all the current versions of all dependencies from pyproject.toml

    Returns:
        dict[str, str]: Map of project: version
    """
    with open(PYPROJECT_TOML, encoding="utf-8") as f:
        contents = rtoml.load(f)
    all_deps: list[str] = []
    results: dict[str, str] = {}
    # Collect PEP 621 [project] dependencies, e.g. ["httpx>=0.23", ...]
    if dependencies := contents.get("project", {}).get("dependencies"):
        all_deps.extend(dependencies)
    # NOTE(review): as visible here the function falls off the end returning
    # None despite the dict[str, str] annotation — `results` is never populated
    # or returned. This block looks truncated; verify against the full source.
def parse_config_file(path: pathlib.Path) -> Dict[str, Any]:
    """
    Parse the configuration file.

    Parameters
    ----------
    path : pathlib.Path
        Path to the configuration file.

    Returns
    -------
    Dict[str, Any]
        Dictionary of the configuration file.
    """
    with path.open() as file_obj:
        return rtoml.load(file_obj)
def load(cls, path: Path = defaults.CONFIG_FILE) -> Config:
    """
    Reads in the ~/.pytoil.toml config file and returns
    a populated `Config` object.

    Args:
        path (Path, optional): Path to the config file.
            Defaults to defaults.CONFIG_FILE.

    Returns:
        Config: Populated `Config` object.

    Raises:
        FileNotFoundError: If config file not found.
    """
    try:
        with open(path, encoding="utf-8") as f:
            # Fix: default must be a dict, not "". If the [pytoil] table were
            # missing, Config(**"") would raise "argument after ** must be a
            # mapping" — and "" also violates the dict[str, Any] annotation.
            config_dict: dict[str, Any] = rtoml.load(f).get("pytoil", {})
    except FileNotFoundError:
        # Re-raise so callers can present a helpful "no config" message
        raise
    else:
        return Config(**config_dict)
def is_index(path):
    """Return True iff `path` contains an index.toml whose version matches
    FROM_VERSION.

    Args:
        path: Directory expected to contain an "index.toml" file.

    Returns:
        bool: True on an exact version match, else False.

    Raises:
        rtoml.TomlParsingError: If index.toml exists but is not valid TOML.
    """
    # Look for an "index.toml" file that contains a matching 'version = "x.x"'
    target = os.path.join(path, "index.toml")
    if not os.path.isfile(target):
        return False
    with open(target) as file:
        # Keep the try minimal: only rtoml.load can raise TomlParsingError
        try:
            contents = rtoml.load(file)
        except rtoml.TomlParsingError:
            print(f"Not a target: {target} failed to load as TOML")
            raise
    if "version" not in contents:
        print(f"Malformed index file: no version found inside {path}")
        # Fix: previously fell through returning None here; return False
        # explicitly so callers always get a bool.
        return False
    version = contents["version"]
    if version == FROM_VERSION:
        return True
    print(
        f"Version mismatch: {path} version is {version}, expected {FROM_VERSION}"
    )
    return False
def load(name: str) -> Report:
    """Read the report named ``name`` from its TOML file on disk."""
    report_path = os.path.join(constants.reports_path, name, toml_name)
    with open(report_path) as fh:
        fields = toml.load(fh)
    return Report(name=name, **fields)
def test_load_path(tmp_path):
    """rtoml.load accepts a pathlib.Path directly."""
    toml_file = tmp_path / 'test.toml'
    toml_file.write_text('foo = "bar"')
    result = rtoml.load(toml_file)
    assert result == {'foo': 'bar'}
def load_config():
    """Parse the config.toml file that sits next to this module."""
    config_path = Path(__file__).parent / 'config.toml'
    return rtoml.load(config_path)
def test_invalid_type():
    """Passing bytes instead of str to rtoml.load raises TypeError."""
    expected = "invalid toml input, must be str not <class 'bytes'>"
    with pytest.raises(TypeError, match=expected):
        rtoml.load(b'foobar')
def test_load_file(tmp_path):
    """rtoml.load accepts an open file object."""
    toml_file = tmp_path / 'test.toml'
    toml_file.write_text('foo = "bar"')
    with toml_file.open() as handle:
        parsed = rtoml.load(handle)
    assert parsed == {'foo': 'bar'}
def load(name: str) -> Report:
    """Load a report from disk given its name"""
    report_path = os.path.join(constants.reports_path, name, TOML_NAME)
    with open(report_path, encoding="utf-8") as fh:
        return Report(name=name, **toml.load(fh))
'database': { 'connection_max': 5000, 'enabled': True, 'ports': [8001, 8001, 8002], 'server': '192.168.1.1', }, } loaded_obj = rtoml.load("""\ # This is a TOML document. title = "TOML Example" [owner] name = "Tom Preston-Werner" dob = 1979-05-27T07:32:00-08:00 # First class dates [database] server = "192.168.1.1" ports = [8001, 8001, 8002] connection_max = 5000 enabled = true """) assert loaded_obj == obj assert rtoml.dumps(obj) == """\ title = "TOML Example" [owner] dob = 1979-05-27T07:32:00-08:00
def test_invalid_toml():
    """A bare unquoted value raises TomlParsingError with a helpful hint."""
    msg = r'^invalid TOML value, did you mean to use a quoted string\? at line 1 column 5$'
    with pytest.raises(rtoml.TomlParsingError, match=msg):
        rtoml.load('x = y')
def test_load_str():
    """rtoml.load parses a plain TOML string."""
    parsed = rtoml.load('foo = "bar"')
    assert parsed == {'foo': 'bar'}
def load(name: str) -> Lexicon:
    """Read the lexicon named ``name`` from its TOML file on disk."""
    lexicon_path = os.path.join(constants.lexica_path, name, toml_name)
    with open(lexicon_path) as fh:
        return Lexicon(name=name, **toml.load(fh))
def list_datasets():
    """Get the information of all datasets"""
    metadata_path = HERE.joinpath("metadata.toml")
    with metadata_path.open() as fmd:
        return rtoml.load(fmd)
from pathlib import Path

import rtoml

from araneae.config import settings
from araneae.modules.ac.ac_deps import get_authorized_user
from araneae.modules.users.users_models import User
from fastapi import Depends, Request
from fastapi_module import InferringRouter, controller

router = InferringRouter(tags=["meta"])

# Parsed once at import time; used to report the application version in /ping.
pyproject = rtoml.load(
    Path(__file__).parent.joinpath("../../../pyproject.toml"))


@controller(router, version=1)
class MetaController:
    """Meta endpoints: health check and configuration inspection."""

    @router.get("/ping")
    def pong(self, req: Request) -> dict:
        """Liveness probe reporting environment, mount path and app version."""
        return {
            "message": "PONG",
            "environment": settings.python_env,
            "root_path": req.scope.get("root_path"),
            # Version as declared under [tool.poetry] in pyproject.toml
            "version": pyproject["tool"]["poetry"]["version"],
        }

    @router.get("/config")
    def read_config(self, me: User = Depends(get_authorized_user)) -> dict:
        """Dump the full settings object; requires an authorized user."""
        return settings.dict()
def test_invalid_toml():
    """A bare identifier where a value belongs raises TomlParsingError."""
    with pytest.raises(rtoml.TomlParsingError,
                       match='invalid number at line 1 column 5'):
        rtoml.load('x = y')
def test_load(input_toml, output_obj):
    """Parametrised check: loading input_toml yields output_obj."""
    result = rtoml.load(input_toml)
    assert result == output_obj