async def init(ctx: Context, tortoise_orm, location, src_folder):
    """Initialize aerich: validate the ORM config, write the [tool.aerich]
    section of the config file, and create the migrations directory.

    Args:
        ctx: Click context; ``ctx.obj["config_file"]`` holds the config path.
        tortoise_orm: Dotted path to the Tortoise ORM config dict.
        location: Directory where migration files will be stored.
        src_folder: Project source folder (absolute or relative).
    """
    config_file = ctx.obj["config_file"]
    if os.path.isabs(src_folder):
        # BUG FIX: arguments were reversed — relpath(cwd, src_folder) yields the
        # path *from* src_folder *to* cwd (e.g. ".."). We want src_folder
        # expressed relative to the current working directory.
        src_folder = os.path.relpath(src_folder, os.getcwd())
    # Add ./ so it's clear that this is a relative path
    if not src_folder.startswith("./"):
        src_folder = "./" + src_folder
    # Check that we can find the configuration, so we fail before the config
    # file gets created.
    add_src_path(src_folder)
    get_tortoise_config(ctx, tortoise_orm)
    if Path(config_file).exists():
        with open(config_file, "r") as f:
            doc = tomlkit.parse(f.read())
    else:
        doc = tomlkit.parse("[tool.aerich]")
    table = tomlkit.table()
    table["tortoise_orm"] = tortoise_orm
    table["location"] = location
    table["src_folder"] = src_folder
    # Robustness: an existing config file may not have a [tool] table yet.
    if "tool" not in doc:
        doc["tool"] = {"aerich": table}
    else:
        doc["tool"]["aerich"] = table
    with open(config_file, "w") as f:
        f.write(tomlkit.dumps(doc))
    Path(location).mkdir(parents=True, exist_ok=True)
    click.secho(f"Success create migrate location {location}", fg=Color.green)
    click.secho(f"Success write config to {config_file}", fg=Color.green)
def test_end_to_end_using_pyproject_toml(test_repo: Path) -> None:
    """Move the tbump config under [tool.tbump] in pyproject.toml and verify a bump still works."""
    tbump_toml_path = test_repo / "tbump.toml"
    # Nest the existing tbump.toml contents inside a tool.tbump section.
    original_config = tomlkit.loads(tbump_toml_path.read_text())
    tool_table = tomlkit.table()
    tool_table.add("tbump", original_config)
    root_table = tomlkit.table()
    root_table.add("tool", tool_table)
    serialized = tomlkit.dumps(root_table)
    # Write the pyproject.toml and remove tbump.toml
    pyproject_toml_path = test_repo / "pyproject.toml"
    pyproject_toml_path.write_text(serialized)
    tbump_toml_path.unlink()
    tbump.git.run_git(test_repo, "add", ".")
    tbump.git.run_git(test_repo, "commit", "--message", "move tbump config inside pyproject.toml")
    _, previous_commit = tbump.git.run_git_captured(test_repo, "rev-parse", "HEAD")
    tbump.main.main(["-C", str(test_repo), "1.2.41-alpha-2", "--non-interactive"])
    assert bump_done(test_repo, previous_commit, using_pyproject=True)
def serialize(datastructure, format_hint):
    """Serialize the datastructure to either YAML, TOML, JSON or PLIST string (or bytes).

    Args:
        datastructure (any): The datastructure to serialize into a string
            using 'format_hint' format.
        format_hint (str): see 'unserialize'.
    """
    if format_hint == 'toml':
        return TOML.dumps(datastructure)
    if format_hint == 'yaml':
        # yaml_serialize appends a trailing newline; drop it.
        return yaml_serialize(datastructure)[:-1]
    if format_hint == 'json':
        # Remove whitespace before line breaks (needed for Python2.7)
        return JSON.dumps(datastructure, indent=2).replace(' \n', '\n')
    if format_hint in ('plist', 'plist_binary'):
        write_as_binary = format_hint == 'plist_binary'
        # Remove whitespace before line breaks (needed for Python2.7)
        return PLIST.writePlistToString(datastructure, write_as_binary).rstrip()
def _contents_of_input_file(infile, set):
    """Return the text contents of the named sample input file.

    Note: the second parameter is named ``set`` (shadowing the builtin) to
    preserve the existing caller interface; it carries dotted parameter
    overrides for the model.
    """
    params = load_model_params(defaults=SequenceModel.DEFAULT_PARAMS, dotted_params=set)

    def as_csv(rows, header=None):
        # Render a 2-D table as one-decimal CSV text with an optional header.
        with StringIO() as buffer:
            np.savetxt(buffer, rows, header=header, delimiter=",", fmt="%.1f")
            return buffer.getvalue()

    contents = {
        "sequence.yaml": yaml.dump(params, default_flow_style=False),
        "sequence.toml": toml.dumps(dict(sequence=dict(_time=0.0, **params))),
        "bathymetry.csv": as_csv(
            [[0.0, 20.0], [100000.0, -80.0]], header="X [m], Elevation [m]"
        ),
        "sealevel.csv": as_csv(
            [[0.0, 0.0], [200000, -10]], header="Time [y], Sea-Level Elevation [m]"
        ),
        "subsidence.csv": as_csv(
            [[0.0, 0], [30000.0, 0], [35000.0, 0], [50000.0, 0], [100000.0, 0]],
            header="X [x], Subsidence Rate [m / y]",
        ),
    }
    # One extra YAML file per parameter section.
    for section, section_params in params.items():
        contents[f"sequence.{section}"] = yaml.dump(section_params, default_flow_style=False)
    return contents[infile]
def main():
    """Rebuild [tool.poetry.extras] from '# groups=[...]' comments on dependencies."""
    with open(PYPROJECT_TOML_PATH) as f:
        pyproject_contents = tomlkit.loads(f.read())
    deps = pyproject_contents["tool"]["poetry"]["dependencies"]
    extras = {}
    for name in deps:
        comment = deps[name].trivia.comment
        if not comment.startswith("# groups"):
            continue
        parts = comment.split("=")
        assert len(parts) == 2
        # The right-hand side of '=' is a JSON list of group names.
        for group in json.loads(parts[-1]):
            extras.setdefault(group, []).append(name)
    pyproject_contents["tool"]["poetry"]["extras"] = extras
    with open(PYPROJECT_TOML_PATH, "w") as f:
        f.write(tomlkit.dumps(pyproject_contents))
def write_instance(instance_configuration):
    """Write a new or updated instance"""
    name = instance_configuration["name"]
    etc_path = get_etc_instance_path()
    config_file = os.path.join(etc_path, name + ".conf")
    config_directory = os.path.join(etc_path, name)
    try:
        log("Configuration:", instance_configuration, pretty=True, lvl=debug)
        with open(config_file, "w") as handle:
            handle.write(dumps(instance_configuration))
        log("Instance configuration stored.", lvl=debug)
        if not os.path.exists(config_directory):
            os.mkdir(config_directory)
            log("Instance configuration directory created.", lvl=debug)
    except PermissionError:
        log(
            "PermissionError: Could not write instance management configuration "
            "file or create instance configuration directory.",
            lvl=error,
        )
        abort(EXIT_NO_PERMISSION)
def saveSwatch_TOML(fileName: str, colourSwatch: ColourSwatch):
    """ Save a colour swatch as .TOML """
    with open(fileName, "w") as fileData:
        # Scheme metadata first, then one key per colour (hex RGB string).
        document = {"scheme": colourSwatch.name, "author": colourSwatch.author}
        for colour in colourSwatch.colours:
            document[colour.name] = "".join(colour.getRGB255Hex())
        fileData.write(tomlkit.dumps(document))
def add_hook(test_repo: Path, name: str, cmd: str, after_push: bool = False) -> None:
    """
    Patch the configuration file so that we can also test hooks.

    Args:
        test_repo: repository root containing pyproject.toml.
        name: hook name to record.
        cmd: command the hook runs.
        after_push: register under [tool.tbump.after_push] instead of before_commit.
    """
    cfg_path = test_repo / "pyproject.toml"
    # BUG FIX: pathlib.Path has no .text() method (that is the old path.py
    # API); the sibling tests read these files with read_text().
    parsed = tomlkit.loads(cfg_path.read_text())
    key = "after_push" if after_push else "before_commit"
    # Hooks live in an array-of-tables; create it on first use.
    if key not in parsed["tool"]["tbump"]:
        parsed["tool"]["tbump"][key] = tomlkit.aot()
    hook_config = tomlkit.table()
    hook_config.add("cmd", cmd)
    hook_config.add("name", name)
    parsed["tool"]["tbump"][key].append(hook_config)
    # NOTE: removed leftover debugging (local pprint import + dump of the doc).
    cfg_path.write_text(tomlkit.dumps(parsed))
    tbump.git.run_git(test_repo, "add", ".")
    tbump.git.run_git(test_repo, "commit", "--message", "update hooks")
def change_end_date(data):
    """Handle a Slack command that changes the configured end date.

    Reads the config, validates the date from the message text, persists the
    new value, and posts the outcome back to #general.
    """
    message = ""
    try:
        config = read_config(config_path)
    except FileNotFoundError:
        message = f"There was a problem. The configuration file `{config_path}`, could not be found or read."
        logger.exception("", exc_info=True)
    except Exception:
        message = f"There was a problem with configuration file: `{config_path}`"
        logger.exception("", exc_info=True)
    else:
        # The date is the fifth word of the command text.
        new_end_date = data["text"].split(" ")[4]
        try:
            # Validate the format up front; raises ValueError if malformed.
            date.fromisoformat(new_end_date)
            config["end_date"] = new_end_date
            write_config(toml.dumps(config), config_path)
            message = f"Date changed to {new_end_date}"
        # need to write better error message for this(date is out of range or wrong formatting)
        except ValueError:
            # BUG FIX: the f-prefix was missing, so the literal text
            # "{new_end_date}" was logged instead of the submitted value.
            logger.info(f"User inputted {new_end_date} for end date. ")
            logger.exception("", exc_info=True)
            message = "Format should be yyyy-mm-dd, i.e. 2019-02-03"
    response = slack_client.chat_postMessage(channel="#general", text=message)
    try:
        assert response["ok"]
    except AssertionError:
        logger.exception("", exc_info=True)
    logger.debug(f"{response}")
def write_pyproject(self, show_message: bool = True) -> None:
    """Serialize the in-memory pyproject back to pyproject.toml atomically."""
    target = self.pyproject_file.as_posix()
    with atomic_open_for_write(target, encoding="utf-8") as fp:
        fp.write(tomlkit.dumps(self.pyproject))
    if show_message:
        stream.echo("Changes are written to pyproject.toml.")
    # Invalidate the cached document so the next access re-reads from disk.
    self._pyproject = None
def merge_to_toml_config_files(self, configuration=None, testbed_configuration=None,
                               user_settings=None, write_config_files=True):
    """Merge the given configuration layers into ``self.config_files`` and
    optionally write each block out under ``<wd>/config/``.

    Args:
        configuration: base configuration dict to merge in.
        testbed_configuration: testbed-specific overrides.
        user_settings: user-specific overrides.
        write_config_files: when True, serialize each block to disk.

    Fixes: the output file handle was never closed (leak) and the manual
    path-building loop for directory creation only worked for absolute
    paths; ``os.makedirs(..., exist_ok=True)`` handles both cases.
    """
    self.config_files = surfex.merge_config_files_dict(
        self.config_files,
        configuration=configuration,
        testbed_configuration=testbed_configuration,
        user_settings=user_settings,
    )
    for fname in self.config_files:
        this_config_file = "config/" + fname
        block_config = self.config_files[fname]["toml"]
        if write_config_files:
            f_out = self.wd + "/" + this_config_file
            os.makedirs(os.path.dirname(f_out), exist_ok=True)
            with open(f_out, "w") as fh:
                fh.write(tomlkit.dumps(block_config))
def update_pyproject_toml():
    """Generate a 'pyproject.toml' file, or update an existing one.

    This function generates/updates the ``build-system`` section, to be
    consistent with the 'setup.json' file.
    """
    # Read the current file (empty config if it does not exist yet).
    toml_path = ROOT / 'pyproject.toml'
    pyproject = toml.loads(toml_path.read_text(encoding='utf8')) if toml_path.exists() else {}

    # Locate the reentry requirement declared in 'setup.json'.
    setup_cfg = _load_setup_cfg()
    reentry_requirement = None
    for raw_requirement in setup_cfg['install_requires']:
        requirement = Requirement.parse(raw_requirement)
        if requirement.name == 'reentry':
            reentry_requirement = requirement
            break
    if reentry_requirement is None:
        raise DependencySpecificationError("Failed to find reentry requirement in 'setup.json'.")

    # Update the build-system key.
    build_system = pyproject.setdefault('build-system', {})
    build_system.update({
        'requires': ['setuptools>=40.8.0,<50', 'wheel', str(reentry_requirement), 'fastentrypoints~=0.12'],
        'build-backend': 'setuptools.build_meta:__legacy__',
    })

    # Write the new file.
    toml_path.write_text(toml.dumps(pyproject), encoding='utf8')
def write_toml(self, filename: Union[str, Path]) -> PhantomConfig:
    """Write config to TOML file.

    Parameters
    ----------
    filename
        The name of the TOML output file.
    """
    # TODO: writing to TOML does not preserve the comments.
    document = tomlkit.document()
    # Optional free-form header rendered as leading comment lines.
    if self.header is not None:
        for line in self.header:
            document.add(tomlkit.comment(line))
        document.add(tomlkit.nl())
    # One TOML table per config block; each entry is a (value, comment) pair.
    for block_key, block_val in self.to_dict().items():
        block = tomlkit.table()
        if isinstance(block_val, dict):
            for name, (value, comment) in block_val.items():
                if isinstance(value, datetime.timedelta):
                    value = _convert_timedelta_to_str(value)
                # Blank line before each entry, then its comment (if any).
                block.add(tomlkit.nl())
                if comment is not None:
                    block.add(tomlkit.comment(comment))
                block.add(name, value)
        document.add(block_key, block)
    with open(filename, 'w') as fp:
        fp.write(tomlkit.dumps(document))
    return self
def install_plugin(installed: Repository) -> None:
    """Register a fake 'poetry-plugin' in the system pyproject/lockfile and the installed repo."""
    package = ProjectPackage("poetry-instance", __version__)
    plugin = Package("poetry-plugin", "1.2.3")
    package.add_dependency(
        Dependency(plugin.name, "^1.2.3", groups=[SelfCommand.ADDITIONAL_PACKAGE_GROUP])
    )
    content = Factory.create_pyproject_from_package(package)
    system_pyproject_file = SelfCommand.get_default_system_pyproject_file()
    system_pyproject_file.write_text(content.as_string(), encoding="utf-8")

    # Minimal lockfile entry matching the plugin above.
    plugin_entry = {
        "name": "poetry-plugin",
        "version": "1.2.3",
        "category": "main",
        "optional": False,
        "platform": "*",
        "python-versions": "*",
        "checksum": [],
    }
    lock_content = {
        "package": [plugin_entry],
        "metadata": {
            "python-versions": "^3.6",
            "platform": "*",
            "content-hash": "123456789",
            "hashes": {"poetry-plugin": []},
        },
    }
    lock_path = system_pyproject_file.parent.joinpath("poetry.lock")
    lock_path.write_text(tomlkit.dumps(lock_content), encoding="utf-8")
    installed.add_package(plugin)
def create_configuration(self, config: Configuration, *style_urls: str) -> None:
    """Create a configuration file."""
    from nitpick.style import StyleManager  # pylint: disable=import-outside-toplevel

    if config.file:
        doc: TOMLDocument = tomlkit.parse(config.file.read_text())
    else:
        doc = tomlkit.document()
        config.file = self.root / DOT_NITPICK_TOML
    # Fall back to the default style when none was requested.
    if not style_urls:
        style_urls = (str(StyleManager.get_default_style_url()),)

    tool_nitpick = tomlkit.table()
    for comment_line in (
        "Generated by the 'nitpick init' command",
        f"More info at {READ_THE_DOCS_URL}configuration.html",
    ):
        tool_nitpick.add(tomlkit.comment(comment_line))
    tool_nitpick.add("style", tomlkit.array([tomlkit.string(url) for url in style_urls]))
    doc.add(SingleKey(TOOL_NITPICK_KEY, KeyType.Bare), tool_nitpick)
    # config.file will always have a value at this point, but mypy can't see it.
    config.file.write_text(tomlkit.dumps(doc, sort_keys=True))  # type: ignore
def format_toml(self):
    """Serialize metadata as TOML.

    Returns:
        str: Serialized metadata as a TOML-formatted string
    """
    serialized = toml.dumps(self._meta)
    return serialized
def generate_poetry_content(self):
    """Render pyproject.toml content for this layout, including the build-system table."""
    template = POETRY_WITH_LICENSE if self._license else POETRY_DEFAULT
    content = loads(template)
    poetry = content["tool"]["poetry"]

    # Project metadata.
    poetry["name"] = self._project
    poetry["version"] = self._version
    poetry["description"] = self._description
    poetry["authors"].append(self._author)
    if self._license:
        poetry["license"] = self._license

    # Runtime and development dependencies.
    poetry["dependencies"]["python"] = self._python
    for name, constraint in self._dependencies.items():
        poetry["dependencies"][name] = constraint
    for name, constraint in self._dev_dependencies.items():
        poetry["dev-dependencies"][name] = constraint

    # Add build system, pinning poetry-core to the supported version window.
    build_system = table()
    version_spec = ">=" + BUILD_SYSTEM_MIN_VERSION
    if BUILD_SYSTEM_MAX_VERSION is not None:
        version_spec += ",<" + BUILD_SYSTEM_MAX_VERSION
    build_system.add("requires", ["poetry-core" + version_spec])
    build_system.add("build-backend", "poetry.core.masonry.api")
    content.add("build-system", build_system)

    return dumps(content)
def dumps(self, reqs, project: RootDependency, content=None) -> str:
    """Serialize requirements into a build-system TOML document.

    Args:
        reqs: requirements to format.
        project: root dependency (unused here; kept for the converter interface).
        content: existing file content (unused here; kept for the interface).

    Returns:
        TOML text with a [build-system] table listing the requirements.
    """
    doc = document()
    requires = [self._format_req(req=req) for req in reqs]
    # BUG FIX: a fresh tomlkit document has no 'build-system' table, so
    # ``doc['build-system']['requires'] = ...`` raised NonExistentKey.
    # Create the whole table in one assignment instead.
    doc['build-system'] = {'requires': requires}
    return dumps(doc)
def _validate_engine_path():
    '''
    Validates path 'Engine Path' in settings.toml
    '''
    # Natlink (and thus the engine) is Windows-only.
    if not sys.platform.startswith('win'):
        return ''
    try:
        import natlink  # pylint: disable=import-error
    except ImportError:
        return ''
    if not os.path.isfile(_SETTINGS_PATH):
        # No settings file yet: discover the engine directly.
        return _find_natspeak()
    with io.open(_SETTINGS_PATH, "rt", encoding="utf-8") as toml_file:
        data = tomlkit.loads(toml_file.read()).value
    engine_path = data["paths"]["ENGINE_PATH"]
    if os.path.isfile(engine_path):
        return engine_path
    # Stored path is stale: rediscover the engine and persist the new location.
    engine_path = _find_natspeak()
    data["paths"]["ENGINE_PATH"] = engine_path
    try:
        formatted_data = str(tomlkit.dumps(data))
        with io.open(_SETTINGS_PATH, "w", encoding="utf-8") as toml_file:
            toml_file.write(formatted_data)
        printer.out("Setting engine path to {}".format(engine_path))
    except Exception as e:
        printer.out("Error saving settings file {} {} ".format(e, _SETTINGS_PATH))
    return engine_path
def generate_queries(args: argparse.Namespace):
    """Generate an 'exists' query for every mapped field and write them to a TOML file."""
    client = MiniClient(args.host, args.port)
    # Dump the index mapping and flatten it into field names.
    mappings = client.get_mapping(index=args.index)['mappings']
    all_fields = list(get_fields(mappings['properties']))

    doc = tomlkit.document()
    dt_str = get_iso8601_dt_str()
    comment = tomlkit.comment(f"Automatically generated on {dt_str}")

    # Build one [queries.<name>] table per field.
    queries = tomlkit.table()
    for field_name in all_fields:
        entry = tomlkit.table()
        entry.add(comment)
        query_name = 'field_{}_exists'.format(field_name.replace('.', '_'))
        entry.add('query', json.dumps({'query': {'exists': {'field': field_name}}}))
        entry.add('auto_gen', True)
        queries.add(query_name, entry)
    doc["queries"] = queries

    with open(args.output, 'w') as f:
        f.write(tomlkit.dumps(doc))
def save_toml_file(data, path):
    """Serialize *data* with tomlkit and write it to *path* as UTF-8 text.

    Any failure is reported through ``simple_log`` rather than raised.
    """
    try:
        # BUG FIX: `unicode` only exists on Python 2; on Python 3 it raised
        # NameError, which the broad except silently turned into a logged
        # failure on every save. `str` is the Python 3 equivalent (this file
        # already uses Python-3-only syntax elsewhere, e.g. f-strings).
        formatted_data = str(tomlkit.dumps(data))
        with io.open(path, "wt", encoding="utf-8") as f:
            f.write(formatted_data)
    except Exception:
        simple_log(True)
def toml_headers():
    """Build _netlify.toml with build settings and per-pattern cache headers."""
    filename = Path("_netlify.toml")
    doc = document()
    doc.add(comment("netlify.toml"))
    doc.add(comment("Generated: " + datetime.now().isoformat()))

    # [build] table with a nested, indented environment table.
    env = table().indent(2)
    env["YARN_VERSION"] = "1.21.0"
    build = table()
    build["publish"] = "_site/"
    build["command"] = "make build"
    build["environment"] = env
    doc["build"] = build

    # [[headers]] array-of-tables: service worker, manifest, then long-lived assets.
    headers = aot()
    headers.append(make_headers("sw.js", {
        "service-worker-allowed": "/",
        "cache-control": NO_CACHE
    }))
    headers.append(make_headers("**/manifest.json", {"cache-control": NO_CACHE}))
    for pattern in FOREVER_PATTERNS:
        headers.append(make_headers(pattern, {"cache-control": CACHE_FOREVER}))
    doc["headers"] = headers

    output = dumps(doc)
    print(output)
    bytes_written = filename.write_text(output)
    print(bytes_written)
def dumps(self, reqs, project: RootDependency, content=None) -> str:
    """Serialize requirements into a poetry-style lock document.

    Existing *content*, when given, is parsed first so unrelated sections survive.
    """
    doc = tomlkit.parse(content) if content else tomlkit.document()
    doc['package'] = [self._format_req(req=req) for req in reqs]

    # add extras: a requirement may belong to several main/dev environments.
    extras = defaultdict(list)
    for req in reqs:
        if req.is_main:
            for extra in req.main_envs:
                extras[extra].append(req.name)
        if req.is_dev:
            for extra in req.dev_envs:
                extras[extra].append(req.name)
    if extras:
        doc['extras'] = dict(extras)

    doc['metadata'] = {
        # sha256 of tool.poetry section from pyproject.toml
        # 'content-hash': ...,
        # 'platform': '*',
        'python-versions': str(project.python),
    }
    hashes = tomlkit.table()
    for req in reqs:
        hashes[req.name] = list(req.hashes or [])
    doc['metadata']['hashes'] = hashes
    return tomlkit.dumps(doc)
def update_pyproject_version(
    self,
    new_version: str,
    *,
    develop: bool = False,
) -> None:
    """
    Update the version in the pyproject.toml file
    """
    version = safe_version(new_version)
    if develop:
        # Mark development builds with a .dev1 suffix.
        version = f'{version}.dev1'
    doc = tomlkit.parse(self.pyproject_toml_path.read_text())
    # Create the [tool.poetry] path if it does not exist yet.
    if 'tool' not in doc:
        doc['tool'] = tomlkit.table()
    tool = doc['tool']
    if 'poetry' not in tool:
        tool.add('poetry', tomlkit.table())
    tool['poetry']['version'] = version
    self.pyproject_toml_path.write_text(tomlkit.dumps(doc))
def _write(self):
    """
    Writes build definition details into build.toml file, which would be used by the next build.
    build.toml file will contain the same information as build graph,
    function details will only be preserved as function names,
    layer details will only be preserved as layer names.
    """
    # Convert each build definition list into a toml table keyed by uuid.
    functions_table = tomlkit.table()
    for definition in self._function_build_definitions:
        functions_table.add(definition.uuid, _function_build_definition_to_toml_table(definition))

    layers_table = tomlkit.table()
    for definition in self._layer_build_definitions:
        layers_table.add(definition.uuid, _layer_build_definition_to_toml_table(definition))

    # Assemble the document and persist it.
    document = tomlkit.document()
    document.add(tomlkit.comment("This file is auto generated by SAM CLI build command"))
    document.add(BuildGraph.FUNCTION_BUILD_DEFINITIONS, functions_table)
    document.add(BuildGraph.LAYER_BUILD_DEFINITIONS, layers_table)

    if not self._filepath.exists():
        open(self._filepath, "a+").close()
    self._filepath.write_text(tomlkit.dumps(document))
def change_start_date(data):
    """Handle a Slack command that changes the configured start date.

    Reads the config, validates the date from the message text, persists the
    new value, and posts the outcome back to #general.
    """
    message = ""
    try:
        config = read_config(config_path)
    except FileNotFoundError:
        message = f"There was a problem. The configuration file `{config_path}`, could not be found or read."
        logger.exception("", exc_info=True)
    except Exception:
        # (fixed: the `as error` binding here was never used)
        message = f"There was a problem with configuration file: `{config_path}`"
        logger.exception("", exc_info=True)
    else:
        # The date is the fifth word of the command text.
        new_start_date = data["text"].split(" ")[4]
        try:
            # Validate the format up front; raises ValueError if malformed.
            date.fromisoformat(new_start_date)
            config["start_date"] = new_start_date
            write_config(toml.dumps(config), config_path)
            message = f"Date changed to {new_start_date}"
            logger.info(f"Start date changed to {new_start_date}")
        except ValueError as error:
            logger.info(f"User wrote this date as input: {new_start_date}")
            logger.info(f"User wrote this date with error: {error}")
            # Plain string (was an f-string with no placeholders).
            message = "Format should be yyyy-mm-dd, i.e. 2019-01-03"
    response = slack_client.chat_postMessage(channel="#general", text=message)
    try:
        assert response["ok"]
    except AssertionError:
        logger.exception("", exc_info=True)
    logger.debug(f"{response}")
def to_str(self) -> str:
    """
    Returns the settings object as a serialised TOML string.

    Returns:
        str: Settings as TOML string.
    """
    as_mapping = self.dict()
    return tomlkit.dumps(as_mapping)
def _write_metadata_if_needed(self, f):
    """Write the delimited TOML metadata block to *f*, refreshing its 'updated' stamp.

    Skipped entirely when the document had no metadata to begin with.
    """
    if not self._metadata:
        return
    # Keep the attribute and the serialized dict in sync with one assignment.
    self.updated_at = self._metadata['updated'] = datetime.now()
    f.write(TOML_DELIMLF)
    f.write(tomlkit.dumps(self._metadata))
    f.write('\n')
    f.write(TOML_DELIMLF)
def write_lockfile(self, toml_data: Container, show_message: bool = True) -> None:
    """Add the project metadata as the 'root' section and atomically write pdm.lock."""
    toml_data.update({"root": self.get_project_metadata()})
    with atomic_open_for_write(self.lockfile_file) as fp:
        fp.write(tomlkit.dumps(toml_data))
    if show_message:
        context.io.echo("Changes are written to pdm.lock.")
    # Drop the cached lockfile so the next read reflects what was just written.
    self._lockfile = None
def dumps(obj: Union[TOMLMapping, dict]) -> str:
    # TODO: If/when tomlkit allows serialising arbitrary Mapping objects,
    # replace the union with Mapping.
    """Serialize a dict-like object into a TOML string without mutating it.

    If the object was produced by :obj:`loads`, its original formatting/style
    is preserved in the output.
    """
    return tomlkit.dumps(obj)  # type: ignore[arg-type]
def generate_poetry_content(self):
    """Fill the poetry pyproject template with this layout's metadata and dependencies."""
    template = POETRY_WITH_LICENSE if self._license else POETRY_DEFAULT
    content = loads(template)
    poetry = content["tool"]["poetry"]

    # Project metadata.
    poetry["name"] = self._project
    poetry["version"] = self._version
    poetry["description"] = self._description
    poetry["authors"].append(self._author)
    if self._license:
        poetry["license"] = self._license

    # Runtime and development dependencies.
    poetry["dependencies"]["python"] = self._python
    for name, constraint in self._dependencies.items():
        poetry["dependencies"][name] = constraint
    for name, constraint in self._dev_dependencies.items():
        poetry["dev-dependencies"][name] = constraint

    return dumps(content)