def parsed_pipfile(self):
    # Open the pipfile, read it into memory.
    with open(self.pipfile_location) as f:
        contents = f.read()
    # If any outline tables are present...
    if ('[packages.' in contents) or ('[dev-packages.' in contents):
        data = toml.loads(contents)
        # Convert all outline tables to inline tables.
        for section in ('packages', 'dev-packages'):
            for package in data.get(section, {}):
                # Convert things to inline tables — fancy :)
                if hasattr(data[section][package], 'keys'):
                    _data = data[section][package]
                    data[section][package] = toml._get_empty_inline_table(dict)
                    data[section][package].update(_data)
        # We lose comments here, but it's for the best.
        try:
            return contoml.loads(toml.dumps(data, preserve=True))
        except RuntimeError:
            return toml.loads(toml.dumps(data, preserve=True))
    else:
        # Fall back to the toml parser for large files.
        try:
            return contoml.loads(contents)
        except Exception:
            return toml.loads(contents)

async def write(store, media_dir):
    global prev
    state = store.state
    user_state = state['app']['user']
    books = user_state['books']
    selected_book = user_state['current_book']
    selected_lang = user_state['current_language']
    if selected_book != prev['current_book']:
        if selected_book != manual_filename:
            selected_book = os.path.relpath(selected_book, media_dir)
        s = toml.dumps({'current_book': selected_book,
                        'current_language': selected_lang})
        path = os.path.join(media_dir, 'sd-card', USER_STATE_FILE)
        async with aiofiles.open(path, 'w') as f:
            await f.write(s)
    for filename in books:
        book = books[filename]
        if filename in prev['books']:
            prev_book = prev['books'][filename]
        else:
            prev_book = BookFile()
        if book.page_number != prev_book.page_number or book.bookmarks != prev_book.bookmarks:
            path = to_state_file(book.filename)
            if book.filename == manual_filename:
                path = os.path.join(media_dir, path)
            bms = [bm + 1 for bm in book.bookmarks if bm != 'deleted']
            # Remove the start-of-book and end-of-book bookmarks.
            bms = bms[1:-1]
            # Ordered to make sure current_page comes before bookmarks.
            d = OrderedDict([['current_page', book.page_number + 1],
                             ['bookmarks', bms]])
            s = toml.dumps(d)
            async with aiofiles.open(path, 'w') as f:
                await f.write(s)
    prev = user_state

def _parse_pipfile(self, contents):
    # If any outline tables are present...
    if ("[packages." in contents) or ("[dev-packages." in contents):
        data = toml.loads(contents)
        # Convert all outline tables to inline tables.
        for section in ("packages", "dev-packages"):
            for package in data.get(section, {}):
                # Convert things to inline tables — fancy :)
                if hasattr(data[section][package], "keys"):
                    _data = data[section][package]
                    data[section][package] = toml._get_empty_inline_table(dict)
                    data[section][package].update(_data)
        # We lose comments here, but it's for the best.
        try:
            return contoml.loads(toml.dumps(data, preserve=True))
        except RuntimeError:
            return toml.loads(toml.dumps(data, preserve=True))
    else:
        # Fall back to the toml parser for large files.
        try:
            return contoml.loads(contents)
        except Exception:
            return toml.loads(contents)

def create_config(self):
    bgp_config = {'Global': {'GlobalConfig': {'As': self.asn,
                                              'RouterId': self.router_id}}}
    for peer, info in self.peers.iteritems():
        if self.asn == peer.asn:
            peer_type = self.PEER_TYPE_INTERNAL
        else:
            peer_type = self.PEER_TYPE_EXTERNAL
        afi_safi_list = []
        version = netaddr.IPNetwork(info['neigh_addr']).version
        if version == 4:
            afi_safi_list.append({'AfiSafiName': 'ipv4-unicast'})
        elif version == 6:
            afi_safi_list.append({'AfiSafiName': 'ipv6-unicast'})
        else:
            raise Exception('invalid ip address version. {0}'.format(version))
        if info['evpn']:
            afi_safi_list.append({'AfiSafiName': 'l2vpn-evpn'})
        n = {'NeighborConfig': {'NeighborAddress': info['neigh_addr'].split('/')[0],
                                'PeerAs': peer.asn,
                                'AuthPassword': info['passwd'],
                                'PeerType': peer_type},
             'AfiSafis': {'AfiSafiList': afi_safi_list}}
        if info['passive']:
            n['TransportOptions'] = {'PassiveMode': True}
        if info['is_rs_client']:
            n['RouteServer'] = {'RouteServerClient': True}
        if 'Neighbors' not in bgp_config:
            bgp_config['Neighbors'] = {'NeighborList': []}
        bgp_config['Neighbors']['NeighborList'].append(n)
    dplane_config = {'Type': 'netlink', 'VirtualNetworkList': []}
    for info in self.vns:
        dplane_config['VirtualNetworkList'].append({'RD': '{0}:{1}'.format(self.asn, info['vni']),
                                                    'VNI': info['vni'],
                                                    'VxlanPort': info['vxlan_port'],
                                                    'VtepInterface': info['vtep'],
                                                    'Etag': info['color'],
                                                    'SniffInterfaces': info['member'],
                                                    'MemberInterfaces': info['member']})
    config = {'Bgp': bgp_config, 'Dataplane': dplane_config}
    with open('{0}/goplaned.conf'.format(self.config_dir), 'w') as f:
        print colors.yellow(toml.dumps(config))
        f.write(toml.dumps(config))

def create_config(self):
    config = {'Global': {'As': self.asn, 'RouterId': self.router_id}}
    for peer, info in self.peers.iteritems():
        if self.asn == peer.asn:
            peer_type = self.PEER_TYPE_INTERNAL
        else:
            peer_type = self.PEER_TYPE_EXTERNAL
        afi_safi_list = []
        version = netaddr.IPNetwork(info['neigh_addr']).version
        if version == 4:
            afi_safi_list.append({'AfiSafiName': 'ipv4-unicast'})
        elif version == 6:
            afi_safi_list.append({'AfiSafiName': 'ipv6-unicast'})
        else:
            raise Exception('invalid ip address version. {0}'.format(version))
        if info['evpn']:
            afi_safi_list.append({'AfiSafiName': 'l2vpn-evpn'})
            afi_safi_list.append({'AfiSafiName': 'encap'})
            afi_safi_list.append({'AfiSafiName': 'rtc'})
        n = {'NeighborAddress': info['neigh_addr'].split('/')[0],
             'PeerAs': peer.asn,
             'AuthPassword': info['passwd'],
             'PeerType': peer_type,
             'AfiSafiList': afi_safi_list}
        if info['passive']:
            n['TransportOptions'] = {'PassiveMode': True}
        if info['is_rs_client']:
            n['RouteServer'] = {'RouteServerClient': True}
        if info['is_rr_client']:
            clusterId = info['cluster_id']
            n['RouteReflector'] = {'RouteReflectorClient': True,
                                   'RouteReflectorClusterId': clusterId}
        if 'NeighborList' not in config:
            config['NeighborList'] = []
        config['NeighborList'].append(n)
    with open('{0}/gobgpd.conf'.format(self.config_dir), 'w') as f:
        print colors.yellow('[{0}\'s new config]'.format(self.name))
        print colors.yellow(indent(toml.dumps(config)))
        f.write(toml.dumps(config))

def create_goplane_config(self):
    dplane_config = {'type': 'netlink', 'virtual-network-list': []}
    for info in self.vns:
        dplane_config['virtual-network-list'].append({'rd': '{0}:{1}'.format(self.asn, info['vni']),
                                                      'vni': info['vni'],
                                                      'vxlan-port': info['vxlan_port'],
                                                      'vtep-interface': info['vtep'],
                                                      'etag': info['color'],
                                                      'sniff-interfaces': info['member'],
                                                      'member-interfaces': info['member']})
    config = {'dataplane': dplane_config}
    with open('{0}/goplaned.conf'.format(self.config_dir), 'w') as f:
        print colors.yellow(toml.dumps(config))
        f.write(toml.dumps(config))

def __init__(self, overrides=None, valgrind=False):
    self.pid = None
    with open(os.path.join(constants.ASSETS_DIR, "test.toml")) as fp:
        self.cf = toml.load(fp)
    if overrides:
        update_nested_dict(self.cf, overrides)
        self.cf["core"].pop("admin_host", None)
    # Extract some fields from the config.
    self.data_store = self.cf["zone_source"]["type"].lower()
    self.pidfile = self.cf["core"]["pidfile"]
    self.admin_host = self.cf["core"].get("admin_host", None)
    self.admin_port = self.cf["core"].get("admin_port", None)
    self.dns_port = self.cf["core"]["port"]
    self.dns_host = self.cf["core"]["bind"]
    # Override mongo host and mongo port.
    mongo_conf = self.cf["zone_source"]["mongo"]
    if self.data_store == "mongo":
        self.zm = ZoneMongo(constants.MONGO_HOST,
                            constants.MONGO_PORT,
                            mongo_conf["dbname"])
    self.valgrind = valgrind
    self.cf_str = toml.dumps(self.cf)
    self.fp = tempfile.NamedTemporaryFile()
    self.fp.write(self.cf_str.encode("utf8"))
    self.fp.flush()
    fname = self.fp.name
    if self.valgrind:
        # TODO find the bug of possible lost and still reachable memory
        self.cmd = ("valgrind --leak-check=full --show-reachable=no "
                    "--show-possibly-lost=no %s -c %s" % (DNS_BIN, fname))
    else:
        self.cmd = "%s -c %s" % (DNS_BIN, fname)
    self.vagrant = vagrant.Vagrant(root=os.path.join(constants.REPO_ROOT, "vagrant"))

def save_toml_file(data, path):
    try:
        formatted_data = unicode(toml.dumps(data))
        with io.open(path, "wt", encoding="utf-8") as f:
            f.write(formatted_data)
    except Exception:
        simple_log(True)

def write_toml(self, data, path=None):
    """Writes the given data structure out as TOML."""
    if path is None:
        path = self.pipfile_location
    try:
        formatted_data = contoml.dumps(data).rstrip()
    except Exception:
        for section in ("packages", "dev-packages"):
            for package in data.get(section, {}):
                # Convert things to inline tables — fancy :)
                if hasattr(data[section][package], "keys"):
                    _data = data[section][package]
                    data[section][package] = toml._get_empty_inline_table(dict)
                    data[section][package].update(_data)
        formatted_data = toml.dumps(data).rstrip()
    if Path(path).absolute() == Path(self.pipfile_location).absolute():
        newlines = self._pipfile_newlines
    else:
        newlines = DEFAULT_NEWLINES
    formatted_data = cleanup_toml(formatted_data)
    with io.open(path, "w", newline=newlines) as f:
        f.write(formatted_data)
    # The pipfile has been mutated!
    self.clear_pipfile_cache()

def _save_auth_keys(key_dict):
    """
    :param key_dict: auth parameters dict
    :type key_dict: dict
    :rtype: None
    """
    config_path = os.environ[constants.DCOS_CONFIG_ENV]
    toml_config = config.mutable_load_from_path(config_path)

    section = 'core'
    config_schema = json.loads(
        pkg_resources.resource_string(
            'dcoscli',
            'data/config-schema/core.json').decode('utf-8'))
    for k, v in iteritems(key_dict):
        python_value = jsonitem.parse_json_value(k, v, config_schema)
        name = '{}.{}'.format(section, k)
        toml_config[name] = python_value

    serial = toml.dumps(toml_config._dictionary)
    with util.open_file(config_path, 'w') as config_file:
        config_file.write(serial)

    return None

def config_wizard():
    click.echo('''
You'll need to create a last.fm API application first. Do so here:

    http://www.last.fm/api/account/create

What you fill in doesn't matter at all, just make sure to save the
API Key and Shared Secret.
''')
    plex_scrobble = {
        'mediaserver_url': 'http://localhost:32400',
        'log_file': '/tmp/plex-scrobble.log',
        'cache_location': '/tmp/plex_scrobble.cache',
        'mediaserver_log_location': platform_log_directory(),
    }
    config = {
        'lastfm': {
            key: click.prompt(key, type=str)
            for key in ['user_name', 'password', 'api_key', 'api_secret']
        }
    }
    config['plex-scrobble'] = {
        key: click.prompt(key, default=plex_scrobble[key])
        for key in plex_scrobble
    }
    generated = toml.dumps(config)
    click.echo('Generated config:\n\n%s' % generated)
    if click.confirm('Write to ~/.plex-scrobble.toml?'):
        with open(os.path.expanduser('~/.plex-scrobble.toml'), 'w') as fp:
            fp.write(generated)

def do_init(cwd: Path, pwd: Optional[Path], name: str, url: str,
            timezone: Optional[str], force: bool,
            config_fname: str = CONFIG_FNAME) -> Result[None]:
    """Initializes a new project.

    This function may overwrite any preexisting files and or directories
    in the target working directory.

    :param pathlib.Path cwd: Path to the invocation directory.
    :param pathlib.Path pwd: Path to the project directory to be created.
    :param str name: Name of the static site, to be put inside the generated
        config file.
    :param str url: URL of the static site, to be put inside the generated
        config file.
    :param str timezone: Geographical timezone name for timestamp-related
        values, to be put inside the generated config file.
    :param bool force: Whether to force project creation in nonempty
        directories or not.
    :param str config_fname: Name of the config file to generate.
    :returns: Nothing upon successful execution or an error message when
        execution fails.
    :rtype: :class:`Result`
    """
    name = pwd.name if (not name and pwd is not None) else name
    pwd = cwd if pwd is None else cwd.joinpath(pwd)
    try:
        pwd.mkdir(parents=True, exist_ok=True)
    except OSError as e:
        return Result.as_failure(e.strerror)
    if not force and any(True for _ in pwd.iterdir()):
        return Result.as_failure(
            "target project directory is not empty -- use the `-f` flag to"
            " force init in nonempty directories")

    rtz = get_tz(timezone)
    if rtz.is_failure:
        return rtz

    # Bootstrap directories.
    bootstrap_conf = SiteConfig(cwd, pwd, timezone=rtz.data)
    try:
        for dk in ("contents_src", "templates_src", "assets_src"):
            bootstrap_conf[dk].mkdir(parents=True, exist_ok=True)
    except OSError as e:
        return Result.as_failure(e.strerror)

    # Create the initial TOML config file.
    init_conf = OrderedDict([
        ("site", OrderedDict([
            ("name", name or ""),
            ("url", url or ""),
            ("timezone", rtz.data.zone),
        ]))
    ])
    pwd.joinpath(config_fname).write_text(toml.dumps(init_conf))

    return Result.as_success(None)

def _set_config(name, value):
    with open(_get_config_file(), "r+") as f:
        config = toml.loads(f.read())
        config[name] = value
        new_conf = toml.dumps(config)
        f.truncate(0)
        f.seek(0)
        f.write(new_conf)

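# Usage sketch for _set_config above (assumed, not from the source): because
# the whole file is round-tripped through toml, any keys already present
# survive the rewrite. Assumes _get_config_file() points at a TOML file.
def _demo_set_config():
    _set_config("editor", "vim")
    with open(_get_config_file()) as f:
        assert toml.loads(f.read())["editor"] == "vim"
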
def _save(data, path):
    '''Only to be used for the settings file.'''
    try:
        formatted_data = unicode(toml.dumps(data))
        with io.open(path, "wt", encoding="utf-8") as f:
            f.write(formatted_data)
    except Exception as e:
        print "Error saving toml file: " + str(e) + " " + _SETTINGS_PATH

def serialize(self):
    d = {
        'general': {
            'measured_edge': self.__edge,
            'no_defect': self.__no_defect,
        },
    }
    return toml.dumps(d)

def save(self):
    if self._data is None:
        return
    with atomic_save(self._path) as outfp:
        self._data.prune()
        data = toml.dumps(self._data.to_dict()).encode("utf8")
        outfp.write(data)

def addSecret(self, name, secret=""):
    if self.secrets.has_key(name) and secret == "":
        # Reuse the already stored secret.
        secret = self.secrets[name]
    if secret == "":
        # Generate a new secret.
        secret = j.base.idgenerator.generateGUID().replace("-", "")
    self.secrets[name.strip()] = secret.strip()
    j.system.fs.writeFile(self.pathsecrets, toml.dumps(self.secrets))

def save(toml_config):
    """
    :param toml_config: TOML configuration object
    :type toml_config: MutableToml or Toml
    """
    serial = toml.dumps(toml_config._dictionary)
    path = util.get_config_path()
    with util.open_file(path, 'w') as config_file:
        config_file.write(serial)

def create_gobgp_config(self):
    config = {'global': {'config': {'as': self.asn,
                                    'router-id': self.router_id}}}
    for peer, info in self.peers.iteritems():
        if self.asn == peer.asn:
            peer_type = self.PEER_TYPE_INTERNAL
        else:
            peer_type = self.PEER_TYPE_EXTERNAL
        afi_safi_list = []
        version = netaddr.IPNetwork(info['neigh_addr']).version
        if version == 4:
            afi_safi_list.append({'config': {'afi-safi-name': 'ipv4-unicast'}})
        elif version == 6:
            afi_safi_list.append({'config': {'afi-safi-name': 'ipv6-unicast'}})
        else:
            raise Exception('invalid ip address version. {0}'.format(version))
        if info['evpn']:
            afi_safi_list.append({'config': {'afi-safi-name': 'l2vpn-evpn'}})
        n = {'config': {'neighbor-address': info['neigh_addr'].split('/')[0],
                        'peer-as': peer.asn,
                        'local-as': self.asn,
                        'auth-password': info['passwd']},
             'afi-safis': afi_safi_list}
        if info['passive']:
            n['transport'] = {'config': {'passive-mode': True}}
        if info['is_rs_client']:
            n['route-server'] = {'config': {'route-server-client': True}}
        if 'neighbors' not in config:
            config['neighbors'] = []
        config['neighbors'].append(n)
    with open('{0}/gobgpd.conf'.format(self.config_dir), 'w') as f:
        print colors.yellow(toml.dumps(config))
        f.write(toml.dumps(config))

def write_front_matter(meta, style="toml"):
    if style == "json":
        import json
        return json.dumps(meta, indent=4, sort_keys=True)
    elif style == "toml":
        import toml
        return "+++\n" + toml.dumps(meta) + "+++\n"
    elif style == "yaml":
        import yaml
        return "---\n" + yaml.dump(meta) + "---\n"
    return ""

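# A hedged usage sketch for write_front_matter above: with the "toml" style,
# the metadata is fenced by "+++" lines, Hugo-style. The sample values are
# hypothetical, not from the source.
def _demo_write_front_matter():
    meta = {"title": "Hello", "draft": False}
    text = write_front_matter(meta, style="toml")
    assert text == '+++\ntitle = "Hello"\ndraft = false\n+++\n'
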
def _save_config_file(config_path, toml_config):
    """
    :param config_path: path to configuration file.
    :type config_path: str
    :param toml_config: TOML configuration object
    :type toml_config: MutableToml or Toml
    """
    serial = toml.dumps(toml_config._dictionary)
    with util.open_file(config_path, 'w') as config_file:
        config_file.write(serial)

def f(config):
    return {
        "dirs": ["contents", "assets", "templates"],
        "files": {
            CONFIG_FNAME: toml.dumps(config),
            "templates/page.html": "{{ unit.raw_text }}",
            "contents/foo.md": "---\ntitle: Foo\n---\n\nfoobar",
            "assets/foo1.txt": "this is foo1",
            "assets/txts/bar1.txt": "another file",
        }
    }

def translate(inp, out):
    inp = open(inp)
    out = open(out, mode='w')
    _, head, tail = inp.read().split('---')
    inp.close()
    obj = yaml.load(head)
    obj = purge(obj)
    out.write(toml.dumps(obj))
    out.write('\n---\n')
    tail = tail.replace('[', '{').replace(']', '}')
    out.write(tail)

def save(toml_config):
    """
    :param toml_config: TOML configuration object
    :type toml_config: MutableToml or Toml
    """
    serial = toml.dumps(toml_config._dictionary)
    path = get_config_path()
    util.ensure_file_exists(path)
    util.enforce_file_permissions(path)
    with util.open_file(path, 'w') as config_file:
        config_file.write(serial)

def write_table(self):
    """
    |write_table| with `TOML <https://github.com/toml-lang/toml>`__ format.

    :raises pytablewriter.EmptyTableNameError:
        If the |table_name| is empty.
    :raises pytablewriter.EmptyHeaderError:
        If the |headers| is empty.

    :Example:
        :ref:`example-toml-table-writer`
    """
    import toml

    with self._logger:
        self._verify_property()
        self.stream.write(toml.dumps(self.tabledata.as_dict()))

def _do_serialize(struct, fmt, encoding):
    """Actually serialize input.

    Args:
        struct: structure to serialize
        fmt: format to serialize to
        encoding: encoding to use while serializing
    Returns:
        encoded serialized structure
    Raises:
        various sorts of errors raised by libraries while serializing
    """
    res = None
    _check_lib_installed(fmt, 'serialize')

    if fmt == 'ini':
        config = configobj.ConfigObj(encoding=encoding)
        for k, v in struct.items():
            config[k] = v
        res = b'\n'.join(config.write())
    elif fmt in ['json', 'json5']:
        # specify separators to get rid of trailing whitespace
        # specify ensure_ascii to make sure unicode is serialized in \x... sequences,
        # not in \u sequences
        res = (json if fmt == 'json' else json5).dumps(
            struct, indent=2, separators=(',', ': '),
            ensure_ascii=False).encode(encoding)
    elif fmt == 'toml':
        if not _is_utf8(encoding):
            raise AnyMarkupError('toml must always be utf-8 encoded according to specification')
        res = toml.dumps(struct).encode(encoding)
    elif fmt == 'xml':
        # passing the encoding argument doesn't encode, it just sets the xml property
        res = xmltodict.unparse(struct, pretty=True, encoding='utf-8').encode('utf-8')
    elif fmt == 'yaml':
        res = yaml.safe_dump(struct, encoding='utf-8', default_flow_style=False)
    else:
        raise AnyMarkupError('unknown format: {0}'.format(fmt))

    return res

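# A minimal sketch of calling _do_serialize above (assumes the surrounding
# module's helpers, e.g. _check_lib_installed and _is_utf8, are available):
# TOML output always comes back as utf-8 encoded bytes.
def _demo_do_serialize():
    data = {"section": {"key": "value"}}
    assert _do_serialize(data, "toml", "utf-8") == b'[section]\nkey = "value"\n'
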
def load(cls):
    tool_config_path = Path(
        str(
            os.environ.get(
                'CHANGES_CONFIG_FILE',
                expanduser('~/.changes')
                if not compat.IS_WINDOWS
                else expandvars(r'%APPDATA%\\.changes'),
            )
        )
    )

    tool_settings = None
    if tool_config_path.exists():
        tool_settings = Changes(**(toml.load(tool_config_path.open())['changes']))

    # The envvar takes precedence over config file settings.
    auth_token = os.environ.get(AUTH_TOKEN_ENVVAR)
    if auth_token:
        info('Found Github Auth Token in the environment')
        tool_settings = Changes(auth_token=auth_token)
    elif not (tool_settings and tool_settings.auth_token):
        while not auth_token:
            info('No auth token found, asking for it')
            # to interact with the Git*H*ub API
            note('You need a Github Auth Token for changes to create a release.')
            click.pause(
                'Press [enter] to launch the GitHub "New personal access '
                'token" page, to create a token for changes.'
            )
            click.launch('https://github.com/settings/tokens/new')
            auth_token = click.prompt('Enter your changes token')

        if not tool_settings:
            tool_settings = Changes(auth_token=auth_token)

        tool_config_path.write_text(
            toml.dumps({'changes': attr.asdict(tool_settings)})
        )

    return tool_settings

def load(cls, repository):
    changes_project_config_path = Path(PROJECT_CONFIG_FILE)
    project_settings = None

    if changes_project_config_path.exists():
        # releases_directory, labels
        project_settings = Project(
            **(toml.load(changes_project_config_path.open())['changes'])
        )

    if not project_settings:
        releases_directory = Path(
            click.prompt(
                'Enter the directory to store your releases notes',
                DEFAULT_RELEASES_DIRECTORY,
                type=click.Path(exists=True, dir_okay=True),
            )
        )

        if not releases_directory.exists():
            debug(
                'Releases directory {} not found, creating it.'.format(
                    releases_directory
                )
            )
            releases_directory.mkdir(parents=True)

        project_settings = Project(
            releases_directory=str(releases_directory),
            labels=configure_labels(repository.labels),
        )
        # write config file
        changes_project_config_path.write_text(
            toml.dumps({'changes': attr.asdict(project_settings)})
        )

    project_settings.repository = repository
    project_settings.bumpversion = BumpVersion.load(repository.latest_version)
    return project_settings

def policy_from_crawl(prof):
    conf = {}
    conf['scripts'] = {}
    opts = []
    hosts = []

    if not prof['js_sources'] and not prof['inline']:
        conf['scripts']['allow'] = 'none'
        # Serialize here too, so every path returns a TOML string.
        return toml.dumps(conf)

    if prof['js_sources']:
        conf['scripts']['allow'] = 'custom'
        for source in prof['js_sources']:
            if source == 'HOME':
                opts.append('self')
            else:
                hosts.append(source)
    if prof['inline']:
        opts.append('inline')

    conf['scripts']['options'] = opts
    conf['scripts']['hosts'] = hosts
    return toml.dumps(conf)

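# Illustrative input/output for policy_from_crawl above. The profile shape is
# assumed from the keys the function reads, not taken from the source; the
# parsed form is compared so the exact TOML layout doesn't matter.
def _demo_policy_from_crawl():
    prof = {"js_sources": ["HOME", "cdn.example.com"], "inline": True}
    policy = policy_from_crawl(prof)
    assert toml.loads(policy) == {
        "scripts": {
            "allow": "custom",
            "options": ["self", "inline"],
            "hosts": ["cdn.example.com"],
        }
    }
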
def write_toml(self, data, path=None):
    """Writes the given data structure out as TOML."""
    if path is None:
        path = self.pipfile_location
    try:
        formatted_data = contoml.dumps(data).rstrip()
    except Exception:
        for section in ('packages', 'dev-packages'):
            for package in data[section]:
                # Convert things to inline tables — fancy :)
                if hasattr(data[section][package], 'keys'):
                    _data = data[section][package]
                    data[section][package] = toml._get_empty_inline_table(dict)
                    data[section][package].update(_data)
        formatted_data = toml.dumps(data).rstrip()
    formatted_data = cleanup_toml(formatted_data)
    with open(path, 'w') as f:
        f.write(formatted_data)

def read_config_file_scrubbed():
    return toml.dumps(scrub_secrets(load_config()))

def present(module, dest, conf, jsonbool, merge, create, backup):
    diff = {
        'before': '',
        'after': '',
        'before_header': '%s (content)' % dest,
        'after_header': '%s (content)' % dest,
    }

    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        b_destpath = os.path.dirname(b_dest)
        if not os.path.exists(b_destpath) and not module.check_mode:
            os.makedirs(b_destpath)
        b_lines = []
    else:
        f = open(b_dest, 'rb')
        b_lines = f.readlines()
        f.close()

    lines = to_native(b('').join(b_lines))

    if module._diff:
        diff['before'] = lines

    b_conf = to_bytes(conf, errors='surrogate_or_strict')

    tomlconfig = pytoml.loads(lines)
    config = {}
    if jsonbool:
        config = eval(b_conf)
    else:
        config = pytoml.loads(b_conf)

    if not isinstance(config, dict):
        if jsonbool:
            module.fail_json(
                msg="Invalid value in json parameter: {0}".format(config))
        else:
            module.fail_json(
                msg="Invalid value in toml parameter: {0}".format(config))

    b_lines_new = b_lines
    msg = ''
    changed = False

    if not merge:
        if tomlconfig != config:
            b_lines_new = to_bytes(pytoml.dumps(config))
            msg = 'config overwritten'
            changed = True
    else:
        mergedconfig = deepmerge(tomlconfig, config)
        if tomlconfig != mergedconfig:
            b_lines_new = to_bytes(pytoml.dumps(mergedconfig))
            msg = 'config merged'
            changed = True

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines_new))

    backupdest = ""
    if changed and not module.check_mode:
        if backup and os.path.exists(b_dest):
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines_new, dest)

    if module.check_mode and not os.path.exists(b_dest):
        module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)

def save_object(object_name, obj):
    _ensure_base_storage_dir_created()
    with open(_name_to_path(object_name), 'w') as file:
        file.write(toml.dumps(obj))

def test_array_sep():
    encoder = toml.TomlArraySeparatorEncoder(separator=",\t")
    d = {"a": [1, 2, 3]}
    o = toml.loads(toml.dumps(d, encoder=encoder))
    assert o == toml.loads(toml.dumps(o, encoder=encoder))

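# A short sketch of the property the test above relies on (assuming the
# uiri/toml package): the separator only changes the serialized text, never
# the parsed value, so any encoder round-trips losslessly.
def _demo_array_sep_roundtrip():
    encoder = toml.TomlArraySeparatorEncoder(separator=",\t")
    text = toml.dumps({"a": [1, 2, 3]}, encoder=encoder)
    assert toml.loads(text) == {"a": [1, 2, 3]}
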
def write(self, data):
    # Format the TOML data before writing it out.
    with open(self.pipfile_location, 'w') as f:
        f.write(format_toml(toml.dumps(data)))

def update_last_harvest(self):
    data = toml.loads(db_path.read_text()) if db_path.exists() else {}
    data[self.address] = int(time.time())
    db_path.write_text(toml.dumps(data))

    )
    config_str = """
gas_price = 777
[[accounts]]
from_address = "0x111"
private_key = "secret"
[[accounts.txs]]
to_address = "a1"
value = 2
[[accounts.txs]]
to_address = "a2"
value = 3
[[accounts]]
from_address = "0x222"
private_key = "secret"
[[accounts.txs]]
to_address = "a3"
value = 4
[[accounts.txs]]
to_address = "a4"
value = 5
"""
    assert toml.dumps(asdict(config)).strip() == config_str.strip()
    assert toml.loads(config_str) == asdict(config)

def subscriptions_new(ip, dnsservice, domain, token, target):
    # Check if a subscription already exists (limit to just one).
    # https://github.com/rootzoll/raspiblitz/issues/1786
    if Path(SUBSCRIPTIONS_FILE).is_file():
        subs = toml.load(SUBSCRIPTIONS_FILE)
        if "subscriptions_letsencrypt" in subs:
            if len(subs['subscriptions_letsencrypt']) > 0:
                raise BlitzError("not more than one letsencrypt subscription",
                                 "cancel existing letsencrypt first")

    # domain needs to be the full domain name
    if domain.find(".") == -1:
        raise BlitzError("not a fully qualified domain name", domain)

    # check if domain already exists
    if len(get_subscription(domain)) > 0:
        raise BlitzError("domain already exists", domain)

    # make sure the lets encrypt client is installed
    os.system("/home/admin/config.scripts/bonus.letsencrypt.sh on")

    # dyndns
    real_ip = ip
    if ip == "dyndns":
        update_url = ""
        if dnsservice == "duckdns":
            update_url = "https://www.duckdns.org/update?domains={0}&token={1}".format(
                domain, token)
        subprocess.run([
            '/home/admin/config.scripts/internet.dyndomain.sh', 'on', domain,
            update_url
        ], stdout=subprocess.PIPE).stdout.decode('utf-8').strip()
        real_ip = cfg.public_ip
        if dnsservice == "dynu":
            raise BlitzError("not implemented",
                             "dynamic ip updating for dynu.com not implemented yet")

    # update DNS with the actual IP
    if dnsservice == "duckdns":
        print("# dnsservice=duckdns --> update {0}".format(domain))
        duckdns_update(domain, token, real_ip)
    elif dnsservice == "dynu":
        print("# dnsservice=dynu --> update {0}".format(domain))
        dynu_update(domain, token, real_ip)

    # create subscription data for storage
    subscription = dict()
    subscription['type'] = "letsencrypt-v1"
    subscription['id'] = domain
    subscription['active'] = True
    subscription['name'] = "{0} for {1}".format(dnsservice, domain)
    subscription['dnsservice_type'] = dnsservice
    subscription['dnsservice_token'] = token
    subscription['ip'] = ip
    subscription['target'] = target
    subscription['description'] = "For {0}".format(target)
    subscription['time_created'] = str(datetime.now().strftime("%Y-%m-%d %H:%M"))
    subscription['warning'] = ""

    # load, add and store subscriptions
    try:
        os.system("sudo chown admin:admin {0}".format(SUBSCRIPTIONS_FILE))
        if Path(SUBSCRIPTIONS_FILE).is_file():
            print("# load toml file")
            subscriptions = toml.load(SUBSCRIPTIONS_FILE)
        else:
            print("# new toml file")
            subscriptions = {}
        if "subscriptions_letsencrypt" not in subscriptions:
            subscriptions['subscriptions_letsencrypt'] = []
        subscriptions['subscriptions_letsencrypt'].append(subscription)
        with open(SUBSCRIPTIONS_FILE, 'w') as writer:
            writer.write(toml.dumps(subscriptions))
            writer.close()
    except Exception as e:
        eprint(e)
        raise BlitzError("fail on subscription storage", str(subscription), e)

    # run the ACME script
    print("# Running letsencrypt ACME script ...")
    acme_result = subprocess.Popen([
        "/home/admin/config.scripts/bonus.letsencrypt.sh", "issue-cert",
        dnsservice, domain, token, target
    ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf8')
    out, err = acme_result.communicate()
    eprint(str(out))
    eprint(str(err))
    if out.find("error=") > -1:
        time.sleep(6)
        raise BlitzError("letsencrypt acme failed", out)

    print("# OK - LETSENCRYPT DOMAIN IS READY")
    return subscription

def generate_sources(cargo_lock):
    sources = []
    cargo_vendored_sources = {
        VENDORED_SOURCES: {'directory': f'{CARGO_CRATES}'},
        'crates-io': {'replace-with': VENDORED_SOURCES},
    }
    metadata = cargo_lock.get('metadata')
    for package in cargo_lock['package']:
        name = package['name']
        version = package['version']
        if 'source' in package:
            source = package['source']
            if source.startswith('git+'):
                git_sources, cargo_vendored_entry = get_git_sources(package)
                sources += git_sources
                cargo_vendored_sources.update(cargo_vendored_entry)
                continue
            else:
                key = f'checksum {name} {version} ({source})'
                if metadata is not None and key in metadata:
                    checksum = metadata[key]
                elif 'checksum' in package:
                    checksum = package['checksum']
                else:
                    logging.warning(f'{name} doesn\'t have checksum')
                    continue
        else:
            logging.warning(f'{name} has no source')
            logging.debug(f'Package for {name}: {package}')
            continue
        sources += [
            {
                'type': 'file',
                'url': f'{CRATES_IO}/{name}/{name}-{version}.crate',
                'sha256': checksum,
                'dest': CARGO_CRATES,
                'dest-filename': f'{name}-{version}.crate'
            },
            {
                'type': 'file',
                'url': 'data:' + urlquote(json.dumps({
                    'package': checksum,
                    'files': {}
                })),
                'dest': f'{CARGO_CRATES}/{name}-{version}',
                'dest-filename': '.cargo-checksum.json',
            },
        ]
    sources.append({
        'type': 'shell',
        'dest': CARGO_CRATES,
        'commands': ['for c in *.crate; do tar -xf $c; done']
    })
    logging.debug(f'Vendored sources: {cargo_vendored_sources}')
    sources.append({
        'type': 'file',
        'url': 'data:' + urlquote(toml.dumps({
            'source': cargo_vendored_sources,
        })),
        'dest': CARGO_HOME,
        'dest-filename': 'config'
    })
    return sources

def save_config(config):
    ensure_parent_dir_exists(CONFIG_FILE)
    with atomic_write(CONFIG_FILE, mode='wb', overwrite=True) as f:
        f.write(toml.dumps(config).encode('utf-8'))

def print_config(self):
    """Print the current state of the configuration."""
    if self.configuration:
        print(toml.dumps(self.configuration))

def test_tuple():
    d = {"a": (3, 4)}
    # TOML has no tuple type: the tuple is dumped as an array and parsed
    # back as a list, so compare round trips rather than o against d.
    o = toml.loads(toml.dumps(d))
    assert o == toml.loads(toml.dumps(o))

def json_to_toml(github_action_config_json: str) -> str:
    return toml.dumps(json.loads(github_action_config_json))

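# Example conversion through json_to_toml above (the sample config values
# are hypothetical): JSON object text in, TOML text out.
def _demo_json_to_toml():
    assert json_to_toml('{"jobs": 4, "name": "ci"}') == 'jobs = 4\nname = "ci"\n'
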
def toml(self) -> str:
    if __HAVE_TOML__:
        return toml.dumps(self)
    raise ValueError('TOML support is not available. `pip3 install toml`')

def save_config(config, target):
    with open(target, 'wt') as fp:
        fp.write(toml.dumps(config, encoder=DottedTomlEncoder()))
    return True

async def state_step(self) -> None:
    self.wait_steps -= 1
    if self.wait_steps > 0:
        return

    if self.__state == "Empty":
        if len(self.unit_queue) == 0:
            # with (TARGET_DIR / "upgrade.json").open("w") as f:
            #     f.write(json.dumps({"Upgrade": self.data_upgrades}, indent=4))
            # with (TARGET_DIR / "unit.json").open("w") as f:
            #     f.write(json.dumps({"Unit": self.data_units}, indent=4))
            # with (TARGET_DIR / "ability.json").open("w") as f:
            #     f.write(json.dumps({"Ability": self.data_abilities}, indent=4))
            data: dict = {
                "Upgrade": self.data_upgrades,
                "Ability": self.data_abilities,
                "Unit": self.data_units,
            }
            with (TARGET_DIR / "data_readable.json").open("w") as f:
                f.write(json.dumps(data, indent=4, sort_keys=True))
            with (TARGET_DIR / "data.json").open("w") as f:
                f.write(json.dumps(data, sort_keys=True))
            with (TARGET_DIR / "upgrade.toml").open("w") as f:
                f.write(toml.dumps({"Upgrade": self.data_upgrades}))
            with (TARGET_DIR / "unit.toml").open("w") as f:
                f.write(toml.dumps({"Unit": self.data_units}))
            with (TARGET_DIR / "ability.toml").open("w") as f:
                f.write(toml.dumps({"Ability": self.data_abilities}))
            await self._client.leave()
            return
        print("Units left:", len(self.unit_queue))
        self.current_unit = self.unit_queue.pop()
        await self._client.debug_create_unit([[
            UnitTypeId(self.current_unit), 1, self._game_info.map_center, 1
        ]])
        self.time_left = 10
        self.__state = "WaitCreate"
    elif self.__state == "WaitCreate":
        if len(self.all_own_units) == 0 and self.current_unit == UnitTypeId.LARVA.value:
            # Larva cannot be created without a hatchery
            await self._client.debug_create_unit(
                [[UnitTypeId.HATCHERY, 1, self._game_info.map_center, 1]])
            self.wait_steps = 10
            return
        elif len(self.all_own_units) == 0:
            self.time_left -= 1
            if self.time_left < 0:
                index = [
                    i for i, u in enumerate(self.data_units)
                    if u["id"] == self.current_unit
                ][0]
                del self.data_units[index]
                self.__state = "Clear"
        else:
            cands = [
                u for u in self.all_own_units
                if u._proto.unit_type == self.current_unit
            ]
            if len(cands) == 0:
                # Check for some allowed specialization
                su = self.all_own_units.first.name.upper()
                lu = UnitTypeId(self.current_unit).name.upper()
                if len(self.all_own_units) == 1 and (su in lu or all(
                        n.startswith("CREEPTUMOR") for n in (su, lu))):
                    unit = self.all_own_units.first
                else:
                    assert False, (
                        f"Invalid self.all_own_units (looking for "
                        f"{UnitTypeId(self.current_unit) !r}): {self.all_own_units}")
            else:
                unit = cands[0]
            assert unit.is_ready
            index = [
                i for i, u in enumerate(self.data_units)
                if u["id"] == self.current_unit
            ][0]
            if self.current_unit in [
                    UnitTypeId.CREEPTUMOR.value, UnitTypeId.CREEPTUMORQUEEN.value
            ]:
                # TODO: Handle this properly
                # Creep tumors automatically burrow when complete
                # CREEPTUMORBURROWED
                pass
            elif self.current_unit == UnitTypeId.LARVA.value:
                # Larva must be selected
                unit = self.all_own_units(UnitTypeId.LARVA).first
            elif self.current_unit in [
                    UnitTypeId.BARRACKSTECHLAB.value,
                    UnitTypeId.BARRACKSREACTOR.value,
                    UnitTypeId.FACTORYTECHLAB.value,
                    UnitTypeId.FACTORYREACTOR.value,
                    UnitTypeId.STARPORTTECHLAB.value,
                    UnitTypeId.STARPORTREACTOR.value,
            ]:
                # Reactors and tech labs are not really part of the building,
                # so to get the abilities an appropriate building must be added.
                # Bare Reactor and TechLab have no abilities, so not matching them here.
                if self.current_unit in [
                        UnitTypeId.BARRACKSTECHLAB.value,
                        UnitTypeId.BARRACKSREACTOR.value
                ]:
                    ut = UnitTypeId.BARRACKS
                elif self.current_unit in [
                        UnitTypeId.FACTORYTECHLAB.value,
                        UnitTypeId.FACTORYREACTOR.value
                ]:
                    ut = UnitTypeId.FACTORY
                elif self.current_unit in [
                        UnitTypeId.STARPORTTECHLAB.value,
                        UnitTypeId.STARPORTREACTOR.value
                ]:
                    ut = UnitTypeId.STARPORT
                else:
                    assert False, f"Type? {unit.type_id.name}"
                if len(self.all_own_units) > 1:
                    assert len(self.all_own_units) == 2 and all(
                        u.is_ready for u in self.all_own_units)
                    # Building and addon both created
                else:
                    await self._client.debug_create_unit(
                        [[ut, 1, self._game_info.map_center, 1]])
                    await self._client.debug_kill_unit([unit.tag])
                    self.wait_steps = 100
                    self.__state = "BuildAddOn"
                    return
            elif self.data_units[index]["needs_power"]:
                # Build a pylon for protoss buildings that need power
                if len(self.all_own_units) > 1:
                    assert len(self.all_own_units) == 2, f"Units: {self.all_own_units}"
                    assert all(u.is_ready for u in self.all_own_units)
                    assert len(self.state.psionic_matrix.sources) == 1
                    # Pylon already created
                else:
                    if self.current_unit == UnitTypeId.GATEWAY.value:
                        # Disable autocast of warpgate morph
                        await self._client.toggle_autocast(
                            [unit], AbilityId.MORPH_WARPGATE)
                    await self._client.debug_create_unit([[
                        UnitTypeId.PYLON, 1, self._game_info.map_center, 1
                    ]])
                    self.wait_steps = 200
                    return
            else:
                assert self.current_unit == unit.type_id.value, \
                    f"{self.current_unit} == {unit.type_id.value} ({unit.type_id})"

            self.data_units[index]["cargo_capacity"] = if_nonzero(unit._proto.cargo_space_max)
            self.data_units[index]["max_health"] = unit._proto.health_max
            self.data_units[index]["max_shield"] = if_nonzero(unit._proto.shield_max)
            self.data_units[index]["detection_range"] = if_nonzero(unit._proto.detect_range)
            self.data_units[index]["start_energy"] = if_nonzero(unit._proto.energy, int)
            self.data_units[index]["max_energy"] = if_nonzero(unit._proto.energy_max)
            self.data_units[index]["radius"] = if_nonzero(unit._proto.radius)
            self.data_units[index]["is_flying"] = (
                unit.is_flying and unit.type_id != UnitTypeId.COLOSSUS)
            # TODO: "placement_size" for buildings

            # Provided power radius
            power_sources = self.state.psionic_matrix.sources
            if len(power_sources) > 0:
                assert len(power_sources) == 1
                self.data_units[index]["power_radius"] = power_sources[0].radius

            # Unit abilities
            try:
                abilities = (await self.get_available_abilities(
                    [unit], ignore_resource_requirements=True))[0]
                # No requirements when all tech is locked
                self.data_units[index]["abilities"] = [
                    {"ability": a.value} for a in abilities
                    if self.recognizes_ability(a.value)
                    and self.ability_specialization_allowed_for(a.value, unit._proto.unit_type)
                ]
                # See requirement-depending upgrades with tech
                await self._client.debug_tech_tree()
                self.__state = "TechCheck"
            except ValueError as e:
                assert "is not a valid AbilityId" in repr(e), repr(e)
                # TODO: maybe skip the unit entirely
                self.__state = "Clear"
    elif self.__state == "BuildAddOn":
        assert len(self.all_own_units) == 1, f"? {self.all_own_units}"
        unit = self.all_own_units.first
        self.do(unit.build(UnitTypeId(self.current_unit)))
        self.wait_steps = 10
        self.__state = "BuildAddOnWait"
    elif self.__state == "BuildAddOnWait":
        assert len(self.all_own_units) == 2, f"? {self.all_own_units}"
        if all(u.is_ready for u in self.all_own_units):
            self.__state = "WaitCreate"
    elif self.__state == "TechCheck":
        possible_units = [
            u for u in self.all_own_units
            if u._proto.unit_type == self.current_unit
        ]
        if possible_units:
            unit = possible_units[0]
            assert unit.is_ready
            index = [
                i for i, u in enumerate(self.data_units)
                if u["id"] == self.current_unit
            ][0]
            abilities = (await self.get_available_abilities(
                [unit], ignore_resource_requirements=True))[0]
            print("#", unit)
            for a in abilities:
                print(">", a)
                if not self.recognizes_ability(a.value):
                    continue
                if not self.ability_specialization_allowed_for(
                        a.value, unit._proto.unit_type):
                    continue
                if a.value not in [
                        a["ability"] for a in self.data_units[index]["abilities"]
                ]:
                    self.data_units[index]["abilities"].append({
                        "requirements": "???",
                        "ability": a.value
                    })
        # Switch all tech back off
        await self._client.debug_tech_tree()
        self.__state = "Clear"
    elif self.__state == "WaitCreate":
        # NOTE: this branch is shadowed by the earlier "WaitCreate" elif above.
        if len(self.all_own_units) == 0:
            self.time_left -= 1
            if self.time_left < 0:
                self.__state = "Clear"
    elif self.__state == "WaitEmpty":
        if len(self.all_own_units) > 0:
            self.time_left -= 1
            if self.time_left < 0:
                assert False, "Clear failed"
            else:
                # Kill broodlings etc.
                for u in self.all_units:
                    await self._client.debug_kill_unit([u.tag])
                self.wait_steps = 20
        else:
            self.__state = "Empty"

    if self.__state == "Clear":
        for u in self.all_units:
            await self._client.debug_kill_unit([u.tag])
        self.wait_steps = 20
        self.current_unit = None
        self.__state = "WaitEmpty"
        self.time_left = 10

def publish(
    ctx, test: bool = False, install: bool = False, n_download_tries: int = 3
) -> None:
    """
    Publish the project to pypi / testpypi.

    If you use the test flag, you have at least the following in `~/.pypirc`:

        [testpypi]
        repository: https://test.pypi.org/legacy/

    :param ctx: invoke context
    :param test: whether to publish to normal or test pypi. If publishing to
        testpypi, .dev<dev_num> is added to the version where <dev_num> is one
        larger than the highest dev version published. This is because testpypi
        won't let you publish the same version multiple times; doing this
        automates changing the version for repeat publishing + testing.
        Additionally, the micro/patch version is incremented because it's
        assumed that it's a dev version of the _next_ release.

        WARNING - don't publish multiple times too quickly. If so, the next
        dev num can't be pulled from testpypi because it won't have updated yet.
    :param install: whether to install the project from test pypi. Only used
        if `test` is true. This is better than running `invoke install`
        separately because it will try multiple times to get the newly
        uploaded version (it usually takes a couple of tries).
    :param n_download_tries: how many times to attempt to install the project.
        After each attempt there is a 5 second sleep period.
    """
    project_name = _get_from_pyproject(["tool", "poetry", "name"])
    project_root = str(Path(__file__).parent.resolve())
    sleep_time = 5

    if test:
        pyproject_path = Path(__file__).parent / "pyproject.toml"
        original_pyproject_str = pyproject_path.read_text()
        pyproject = toml.loads(original_pyproject_str)
        original_version = pyproject["tool"]["poetry"]["version"]
        version = re.fullmatch(_version_pattern, original_version)
        groups = version.groupdict()
        major, minor, micro = groups["major"], groups["minor"], groups["micro"]
        version = f"{major}.{minor}.{int(micro) + 1}"
        dev_num = _get_next_dev_num(project_name, version)
        version += f".dev{dev_num}"
        # write back the modified version
        pyproject["tool"]["poetry"]["version"] = version
        pyproject_path.write_text(toml.dumps(pyproject))

    try:
        cmd = f"""
        cd "{project_root}"
        poetry build
        twine upload {'--repository testpypi' if test else ''} dist/*
        """
        ctx.run(cmd)
    finally:
        if test:
            pyproject_path.write_text(original_pyproject_str)

    if not test or not install:
        return

    for i in range(n_download_tries):
        sleep(sleep_time)
        try:
            result = ctx.run(
                f"pip install {_index_url} {_extra_url} {project_name}=={version}"
            )
            break
        except UnexpectedExit:
            continue

def to_toml(self, value):
    """Convert the value to TOML."""
    return toml.dumps(value)

def serialize(self) -> str:
    toml_values = self.normalize()
    return toml.dumps(toml_values)

def generate_field_file(csv_filename, ff_filename=None, ext=DEFAULT_EXTENSION, delimiter=","):
    toml_dict: dict = {}
    if not ext.startswith("."):
        ext = f".{ext}"
    if ff_filename is None:
        if csv_filename.startswith("http://") or csv_filename.startswith("https://"):
            ff_filename = csv_filename.split('/')[-1]
            ff_filename = os.path.splitext(ff_filename)[0] + ext
        else:
            ff_filename = os.path.splitext(csv_filename)[0] + ext

    reader = FileReader(csv_filename, has_header=True, delimiter=delimiter)
    first_line = next(reader.readline())
    header_line = reader.header_line

    if len(first_line) > len(header_line):
        raise ValueError(
            f"First line has more columns than the header line: "
            f"{len(first_line)} > {len(header_line)}")
    elif len(first_line) < len(header_line):
        raise ValueError(
            f"Header line has more columns than the first line: "
            f"{len(header_line)} > {len(first_line)}")
    else:
        for i, (key, value) in enumerate(zip(header_line, first_line)):
            value = value.strip()
            if value == "":
                value = f"blank-{i}"
            # print(i)
            if value.startswith('"'):  # strip out quotes if they exist
                value = value.strip('"')
            if value.startswith("'"):
                value = value.strip("'")
            key = key.replace('$', '_')  # not a valid key character for mongodb
            key = key.replace('.', '_')  # not a valid key character for mongodb
            t = Converter.guess_type(value)
            key = key.strip()  # remove any white space inside quotes
            toml_dict[key] = {}
            toml_dict[key]["type"] = t
            toml_dict[key]["name"] = key
            # ff_file.write(f"[{name}]\n")
            # ff_file.write(f"type={t}\n")
            # ff_file.write(f"name={name}")

    with open(ff_filename, "w") as ff_file:
        # print(toml_dict)
        toml_string = toml.dumps(toml_dict)
        ff_file.write("#\n")
        ts = datetime.utcnow()
        ff_file.write(f"# Created '{ff_filename}' at UTC:{ts} by class {__name__}\n")
        ff_file.write("#\n")
        ff_file.write(toml_string)
        ff_file.write("#end\n")
    return FieldFile(ff_filename)

def dumps(self) -> str:
    data = {
        'action': [self._process_action(a) for a in self.shortcut.actions],
    }
    return toml.dumps(data)

def create_gobgp_config(self):
    config = {
        'global': {
            'config': {
                'as': self.asn,
                'router-id': self.router_id
            },
            'use-multiple-paths': {
                'config': {
                    'enabled': True
                }
            }
        }
    }
    for peer, info in self.peers.iteritems():
        if info['interface'] == '':
            if self.asn == peer.asn:
                peer_type = self.PEER_TYPE_INTERNAL
            else:
                peer_type = self.PEER_TYPE_EXTERNAL
            afi_safi_list = []
            version = netaddr.IPNetwork(info['neigh_addr']).version
            if version == 4:
                afi_safi_list.append(
                    {'config': {'afi-safi-name': 'ipv4-unicast'}})
            elif version == 6:
                afi_safi_list.append(
                    {'config': {'afi-safi-name': 'ipv6-unicast'}})
            else:
                raise Exception(
                    'invalid ip address version. {0}'.format(version))
            n = {
                'config': {
                    'neighbor-address': info['neigh_addr'],
                    'peer-as': peer.asn,
                    'local-as': self.asn,
                },
                'afi-safis': afi_safi_list,
            }
        else:
            afi_safi_list = [
                {'config': {'afi-safi-name': 'ipv4-unicast'}},
                {'config': {'afi-safi-name': 'ipv6-unicast'}},
            ]
            n = {
                'config': {'neighbor-interface': info['interface']},
                'afi-safis': afi_safi_list
            }
        if len(info['passwd']) > 0:
            n['config']['auth-password'] = info['passwd']
        if info['evpn']:
            afi_safi_list.append(
                {'config': {'afi-safi-name': 'l2vpn-evpn'}})
        if info['passive']:
            n['transport'] = {'config': {'passive-mode': True}}
        if info['is_rs_client']:
            n['route-server'] = {'config': {'route-server-client': True}}
        if 'neighbors' not in config:
            config['neighbors'] = []
        config['neighbors'].append(n)

    if not self.bgp_remote:
        return config

    with open('{0}/gobgpd.conf'.format(self.config_dir), 'w') as f:
        f.write(toml.dumps(config))

def test_commutativity():
    o = toml.loads(toml.dumps(TEST_DICT))
    assert o == toml.loads(toml.dumps(o))

def absent(module, dest, conf, jsonbool, backup):
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        module.exit_json(changed=False, msg="file not present")

    msg = ''
    diff = {
        'before': '',
        'after': '',
        'before_header': '%s (content)' % dest,
        'after_header': '%s (content)' % dest,
    }

    f = open(b_dest, 'rb')
    b_lines = f.readlines()
    f.close()

    lines = to_native(b('').join(b_lines))
    b_conf = to_bytes(conf, errors='surrogate_or_strict')

    tomlconfig = pytoml.loads(lines)
    config = {}
    if jsonbool:
        config = eval(b_conf)
    else:
        config = pytoml.loads(b_conf)

    if not isinstance(config, dict):
        if jsonbool:
            module.fail_json(
                msg="Invalid value in json parameter: {0}".format(config))
        else:
            module.fail_json(
                msg="Invalid value in toml parameter: {0}".format(config))

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    b_lines_new = b_lines
    msg = ''
    changed = False

    diffconfig = deepdiff(tomlconfig, config)
    if diffconfig is None:
        diffconfig = {}

    if tomlconfig != diffconfig:
        b_lines_new = to_bytes(pytoml.dumps(diffconfig))
        msg = 'config removed'
        changed = True

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines_new))

    backupdest = ""
    if changed and not module.check_mode:
        if backup:
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines_new, dest)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)

def to_toml(rup):
    """
    :param rup: a rupture instance
    :returns: a TOML string
    """
    return toml.dumps(rup.todict())

def to_toml_string(self):
    return str(toml.dumps(self.to_dict())).strip().split('\n')

def __str__(self):
    return toml.dumps(self.dump()).replace(',]\n', ' ]\n')

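# Why the replace above exists (a sketch, assuming the uiri/toml package):
# its encoder closes inline arrays with a trailing ",]", and the replace
# rewrites that end-of-line ",]" into " ]" for nicer output.
def _demo_array_close():
    assert toml.dumps({"x": [1, 2]}) == 'x = [ 1, 2,]\n'
    assert toml.dumps({"x": [1, 2]}).replace(',]\n', ' ]\n') == 'x = [ 1, 2 ]\n'
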
def write_metadata(self, metadata: dict, comment_wrap=False) -> str:
    """Write metadata in this extractor’s format."""
    import toml
    return '\n'.join(('+++', toml.dumps(metadata).strip(), '+++', ''))

def show_config():
    """Show all the config options."""
    SKIP_SECTIONS = ("_test",)

    out = []
    out.append(_clean("""
        # Below are all the sections and options you can have in
        ~/.streamlit/config.toml.
    """))

    def append_desc(text):
        out.append(click.style(text, bold=True))

    def append_comment(text):
        out.append(click.style(text))

    def append_section(text):
        out.append(click.style(text, bold=True, fg="green"))

    def append_setting(text):
        out.append(click.style(text, fg="green"))

    def append_newline():
        out.append("")

    for section, section_description in _section_descriptions.items():
        if section in SKIP_SECTIONS:
            continue

        append_newline()
        append_section("[%s]" % section)
        append_newline()

        for key, option in _config_options.items():
            if option.section != section:
                continue

            if option.visibility == "hidden":
                continue

            if option.is_expired():
                continue

            key = option.key.split(".")[1]
            description_paragraphs = _clean_paragraphs(option.description)

            for i, txt in enumerate(description_paragraphs):
                if i == 0:
                    append_desc("# %s" % txt)
                else:
                    append_comment("# %s" % txt)

            toml_default = toml.dumps({"default": option.default_val})
            toml_default = toml_default[10:].strip()

            if len(toml_default) > 0:
                append_comment("# Default: %s" % toml_default)
            else:
                # Don't say "Default: (unset)" here because this branch applies
                # to complex config settings too.
                pass

            if option.deprecated:
                append_comment("#")
                append_comment("# " + click.style("DEPRECATED.", fg="yellow"))
                append_comment(
                    "# %s" % "\n".join(_clean_paragraphs(option.deprecation_text)))
                append_comment(
                    "# This option will be removed on or after %s."
                    % option.expiration_date)
                append_comment("#")

            option_is_manually_set = (
                option.where_defined != ConfigOption.DEFAULT_DEFINITION)

            if option_is_manually_set:
                append_comment("# The value below was set in %s" % option.where_defined)

            toml_setting = toml.dumps({key: option.value})

            if len(toml_setting) == 0:
                toml_setting = "#%s =\n" % key

            append_setting(toml_setting)

    click.echo("\n".join(out))

def test_bug_148():
    assert 'a = "\\u0064"\n' == toml.dumps({'a': '\\x64'})
    assert 'a = "\\\\x64"\n' == toml.dumps({'a': '\\\\x64'})
    assert 'a = "\\\\\\u0064"\n' == toml.dumps({'a': '\\\\\\x64'})