def _clean_non_serializables(data: Dict) -> Dict:
    """Return a shallow copy of *data* containing only TOML-serializable entries.

    Callables are dropped; nested dicts are cleaned recursively; bytes,
    tuples, and Enum keys/values are converted to serializable forms. Any
    remaining non-builtin value must survive a trial ``pytoml.dumps`` or it
    is skipped.
    """
    cleaned: Dict = {}
    for key, value in data.items():
        if callable(value):
            continue
        if hasattr(value, "to_dict"):
            value = value.to_dict()
        if isinstance(value, dict):
            value = _clean_non_serializables(value)
        if isinstance(value, bytes):
            value = to_hex(value)
        if isinstance(value, tuple):
            value = list(value)
        if isinstance(key, Enum):
            key = key.name
        if isinstance(value, Enum):
            value = value.value
        # Truthy non-builtin values must prove they can be TOML-encoded.
        if value and not isinstance(value, builtin_types):
            try:
                pytoml.dumps({key: value})
            except RuntimeError:
                continue
        cleaned[key] = value
    return cleaned
def _clean_non_serializables(data):
    """Build a copy of *data* with every non-TOML-serializable entry removed.

    Values are normalized first (to_dict objects, nested dicts, bytes,
    tuples, Enums); anything left that is not a builtin type must pass a
    trial pytoml.dumps round or it is dropped.
    """
    result = {}
    for key, value in data.items():
        if callable(value):
            continue  # functions/methods can never be serialized
        if hasattr(value, 'to_dict'):
            value = value.to_dict()
        if isinstance(value, dict):
            value = _clean_non_serializables(value)
        if isinstance(value, bytes):
            value = serialize_bytes(value)
        if isinstance(value, tuple):
            value = list(value)
        if isinstance(key, Enum):
            key = key.name
        if isinstance(value, Enum):
            value = value.value
        if value and not isinstance(value, builtin_types):
            try:
                pytoml.dumps({key: value})
            except RuntimeError:
                continue  # value cannot be represented in TOML
        result[key] = value
    return result
def update_config(cfg):
    """Render TEMPLATE with the TOML-serialized dependency and metadata sections."""
    deps = get_dependencies(cfg)
    snakeye = snakeye_metadata(cfg)
    flit = flit_metadata(cfg)
    sections = {
        'metadata': toml.dumps(flit),
        'snakeye_metadata': toml.dumps(snakeye),
        'dependencies': toml.dumps(deps),
    }
    return TEMPLATE.format(**sections)
# Synchronize virtualenvs declared in virtualenvs.toml: for each env whose
# recorded installed.toml differs from its desired config, (re)create it,
# install its packages/requirements, link requested binaries into link_dir,
# and finally persist the config. Install failures go to handle_error.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def main(filesystem, locator, link_dir, handle_error): with filesystem.open(locator.root.descendant("virtualenvs.toml")) as venvs: contents = pytoml.load(venvs) for name, config in contents["virtualenv"].items(): config.setdefault("sys.version", sys.version) virtualenv = locator.for_name(name=name) existing_config_path = virtualenv.path.descendant("installed.toml") try: with filesystem.open(existing_config_path) as existing_config: if pytoml.loads(existing_config.read()) == config: continue except FileNotFound: virtualenv.create() else: virtualenv.recreate_on(filesystem=filesystem) packages, requirements = _to_install(config=config) try: virtualenv.install(packages=packages, requirements=requirements) except Exception: handle_error(virtualenv) continue for link in config.get("link", []): _link( source=virtualenv.binary(name=link), to=link_dir.descendant(link), filesystem=filesystem, ) with filesystem.open(existing_config_path, "w") as existing_config: existing_config.write(pytoml.dumps(config).encode("utf-8"))
def dumps(self, data, sort=False):
    # type: (...) -> str
    """Serialize *data* to a TOML string.

    TOMLFile instances are delegated to the module-level ``dumps``; any
    other object is rendered via ``toml.dumps`` (optionally key-sorted).
    """
    if isinstance(data, TOMLFile):
        return dumps(data)
    return toml.dumps(data, sort_keys=sort)
def toml_dump(data):
    """Dump *data* to TOML, stripping a top-level [table] header when every
    value inside that table is itself a dict (i.e. the header is redundant)."""
    rendered = pytoml.dumps(data)
    if isinstance(data, dict):
        for table_name, table in data.items():
            if all(isinstance(child, dict) for child in table.values()):
                rendered = rendered.replace('[%s]' % table_name, '')
    return rendered.strip()
# Serialize this node (and optionally its flattened dependency tree) into one
# of several output formats: python (raw), json, yaml, toml, pinned, or a
# human-readable table. "pinned" overrides any user-supplied field list.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def serialise(self, fields=("name", "summary"), recurse=True, format=None): if format == "pinned": # user-specified fields are ignored/invalid in this case fields = ("pinned",) data = [OrderedDict([(f, getattr(self, f, None)) for f in fields])] if format == "human": table = gen_table(self, extra_cols=fields) tabulate.PRESERVE_WHITESPACE = True return tabulate.tabulate(table, headers="keys") if recurse and self.requires: deps = flatten_deps(self) next(deps) # skip over root data += [d for dep in deps for d in dep.serialise(fields=fields, recurse=False)] if format is None or format == "python": result = data elif format == "json": result = json.dumps(data, indent=2, default=str, separators=(",", ": ")) elif format == "yaml": result = oyaml.dump(data) elif format == "toml": result = "\n".join([pytoml.dumps(d) for d in data]) elif format == "pinned": result = "\n".join([d["pinned"] for d in data]) else: raise Exception("Unsupported format") return result
def _migrate(registries_conf_file, output_file):
    """Convert a registries YAML config to TOML.

    Writes the result to *output_file* when given, otherwise prints it.
    """
    # local renamed from `toml` to avoid shadowing the toml module name
    serialized = pytoml.dumps(loadYAML.get_registries(registries_conf_file))
    if not output_file:
        print(serialized)
        return
    with open(output_file, "w") as f:
        f.write(serialized)
def dumps(self, obj):
    """Serialize *obj* to TOML with sorted keys.

    Any underlying failure is wrapped in ``j.exceptions.Value`` so callers
    get a uniform error type with the offending data attached.
    """
    try:
        return pytoml.dumps(obj, sort_keys=True)
    except Exception as exc:
        raise j.exceptions.Value("Toml serialization failed", data=obj, exception=exc)
def fill_volumes_with_model(model_file, volumes, filename, resume_filename=None,
                            partition=False, viewer=False, **kwargs):
    """Fill each volume with a trained model and write HDF5 + TOML outputs.

    Args:
        model_file: path to the trained model.
        volumes: mapping of volume name -> volume object.
        filename: output filename template; must contain '{volume}'.
        resume_filename: optional TOML template of prior predictions to resume
            from; must also contain '{volume}'.
        partition: if True, fill only the test partition of the volumes.
        viewer: if True, open a neuroglancer-style viewer per volume.
        **kwargs: forwarded to fill_volume_with_model.

    Raises:
        ValueError: if a filename template lacks the '{volume}' placeholder.
    """
    if '{volume}' not in filename:
        raise ValueError(
            'HDF5 filename must contain "{volume}" for volume name replacement.'
        )
    if resume_filename is not None and '{volume}' not in resume_filename:
        raise ValueError(
            'TOML resume filename must contain "{volume}" for volume name replacement.'
        )
    if partition:
        _, volumes = partition_volumes(volumes)
    for volume_name, volume in six.iteritems(volumes):
        logging.info('Filling volume %s...', volume_name)
        volume = volume.downsample(CONFIG.volume.resolution)
        if resume_filename is not None:
            resume_volume_filename = resume_filename.format(volume=volume_name)
            resume_volume = six.next(
                six.itervalues(HDF5Volume.from_toml(resume_volume_filename)))
            resume_prediction = resume_volume.to_memory_volume().label_data
        else:
            resume_prediction = None
        volume_filename = filename.format(volume=volume_name)
        checkpoint_filename = volume_filename + '_checkpoint'
        prediction, conflict_count = fill_volume_with_model(
            model_file,
            volume,
            resume_prediction=resume_prediction,
            checkpoint_filename=checkpoint_filename,
            **kwargs)
        config = HDF5Volume.write_file(volume_filename + '.hdf5',
                                       CONFIG.volume.resolution,
                                       label_data=prediction)
        config['name'] = volume_name + ' segmentation'
        # BUGFIX: open in text mode ('w'), not binary ('wb') -- every write
        # below passes str, which raises TypeError on a binary handle in
        # Python 3.
        with open(volume_filename + '.toml', 'w') as tomlfile:
            tomlfile.write('# Filling model: {}\n'.format(model_file))
            tomlfile.write('# Filling kwargs: {}\n'.format(str(kwargs)))
            tomlfile.write(str(toml.dumps({'dataset': [config]})))
        if viewer:
            viewer = WrappedViewer(
                voxel_size=list(np.flipud(CONFIG.volume.resolution)))
            subvolume = volume.get_subvolume(
                SubvolumeBounds(start=np.zeros(3, dtype=np.int64),
                                stop=volume.shape))
            viewer.add(subvolume.image, name='Image')
            viewer.add(prediction, name='Labels')
            viewer.add(conflict_count, name='Conflicts')
            viewer.print_view_prompt()
def __str__(self):
    """Render the instance attributes as TOML, converting numpy arrays to lists."""
    serializable = {
        name: (value.tolist() if isinstance(value, np.ndarray) else value)
        for name, value in six.iteritems(self.__dict__)
    }
    return toml.dumps(serializable)
def _main():
    """Run the toml-test suite and report failures.

    For each .toml test file, checks (a) that a dump/re-parse round trip is
    lossless and (b) that the translated parse matches the adjusted .json
    benchmark. Returns a process exit code (0 only when something succeeded
    and nothing failed; 2 on a bad --dir).
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('-d', '--dir', action='append')
    ap.add_argument('testcase', nargs='*')
    args = ap.parse_args()
    if not args.dir:
        args.dir = [os.path.join(os.path.split(__file__)[0], 'toml-test/tests')]

    succeeded = []
    failed = []

    for path in args.dir:
        if not os.path.isdir(path):
            print('error: not a dir: {0}'.format(path))
            return 2
        for top, dirnames, fnames in os.walk(path):
            for fname in fnames:
                if not fname.endswith('.toml'):
                    continue
                if args.testcase and not any(arg in fname for arg in args.testcase):
                    continue

                parse_error = None
                try:
                    with open(os.path.join(top, fname), 'rb') as fin:
                        parsed = toml.load(fin)
                except toml.TomlError:
                    parsed = None
                    parse_error = sys.exc_info()
                else:
                    dumped = toml.dumps(parsed)
                    parsed2 = toml.loads(dumped)
                    if parsed != parsed2:
                        # BUGFIX: append a 4-tuple; the reporting loop below
                        # unpacks (f, parsed, bench, e), so the previous
                        # 2-tuple raised ValueError whenever a round trip
                        # failed.
                        failed.append((fname, None, None, None))
                        continue

                    with open(os.path.join(top, fname), 'rb') as fin:
                        parsed = toml.load(fin, translate=_testbench_literal)

                try:
                    with io.open(os.path.join(top, fname[:-5] + '.json'), 'rt', encoding='utf-8') as fin:
                        bench = json.load(fin)
                except IOError:
                    bench = None

                if parsed != adjust_bench(bench):
                    failed.append((fname, parsed, bench, parse_error))
                else:
                    succeeded.append(fname)

    for f, parsed, bench, e in failed:
        print('failed: {}\n{}\n{}'.format(f, json.dumps(parsed, indent=4), json.dumps(bench, indent=4)))
        if e:
            traceback.print_exception(*e)
    print('succeeded: {0}'.format(len(succeeded)))
    return 1 if failed or not succeeded else 0
def _main():
    """Run the toml-test suite and report failures.

    For each .toml test file, checks (a) that a dump/re-parse round trip is
    lossless and (b) that the translated parse matches the adjusted .json
    benchmark. Returns a process exit code (0 only when something succeeded
    and nothing failed; 2 on a bad --dir).
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('-d', '--dir', action='append')
    ap.add_argument('testcase', nargs='*')
    args = ap.parse_args()
    if not args.dir:
        args.dir = [os.path.join(os.path.split(__file__)[0], 'toml-test/tests')]

    succeeded = []
    failed = []

    for path in args.dir:
        if not os.path.isdir(path):
            print('error: not a dir: {}'.format(path))
            return 2
        for top, dirnames, fnames in os.walk(path):
            for fname in fnames:
                if not fname.endswith('.toml'):
                    continue
                if args.testcase and not any(arg in fname for arg in args.testcase):
                    continue

                parse_error = None
                try:
                    with open(os.path.join(top, fname), 'rb') as fin:
                        parsed = toml.load(fin)
                except toml.TomlError:
                    parsed = None
                    parse_error = sys.exc_info()
                else:
                    dumped = toml.dumps(parsed)
                    parsed2 = toml.loads(dumped)
                    if parsed != parsed2:
                        # BUGFIX: append a 4-tuple; the reporting loop below
                        # unpacks (f, parsed, bench, e), so the previous
                        # 2-tuple raised ValueError whenever a round trip
                        # failed.
                        failed.append((fname, None, None, None))
                        continue

                    with open(os.path.join(top, fname), 'rb') as fin:
                        parsed = toml.load(fin, translate=_testbench_literal)

                try:
                    with io.open(os.path.join(top, fname[:-5] + '.json'), 'rt', encoding='utf-8') as fin:
                        bench = json.load(fin)
                except IOError:
                    bench = None

                if parsed != adjust_bench(bench):
                    failed.append((fname, parsed, bench, parse_error))
                else:
                    succeeded.append(fname)

    for f, parsed, bench, e in failed:
        print('failed: {}\n{}\n{}'.format(f, json.dumps(parsed, indent=4), json.dumps(bench, indent=4)))
        if e:
            traceback.print_exception(*e)
    print('succeeded: {}'.format(len(succeeded)))
    return 1 if failed or not succeeded else 0
def render_toml(obj):
    """
    Serializes the specified object to TOML.

    :param obj: the object to serialize
    :returns: str
    """
    rendered = pytoml.dumps(obj)
    return rendered
def save_config(config, path):
    """Write *config* as TOML to *path*, stamping the current revision and
    inserting a blank line before each [section] header for readability."""
    config['revision'] = get_revision()
    output = []
    for raw_line in toml.dumps(config).splitlines():
        if raw_line.startswith('['):
            output.append('')  # blank separator before each table header
        output.append(raw_line)
    with open(path, 'w') as fh:
        fh.write('\n'.join(output))
# Persist configuration as TOML via the executor: if an explicit (config,
# path) pair is given, write just that; otherwise write both the JS config
# and the state config to their standard paths (with sudo) and reset the
# executor's caches. No-op when the executor state is disabled; raises on
# readonly instances.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def configSave(self, config=None, path=""): """ """ if self.executor.state_disabled: return if self.readonly: raise j.exceptions.Input( message="cannot write config to '%s', because is readonly" % self, level=1, source="", tags="", msgpub="") if config and path: data = pytoml.dumps(config) self.executor.file_write(path, data) return data = pytoml.dumps(self._configJS) self.executor.file_write(self.configJSPath, data, sudo=True) data = pytoml.dumps(self._configState) self.executor.file_write(self.configStatePath, data, sudo=True) self.executor.reset() # make sure all caching is reset
def create_hugo_post(content, dest_dir):
    """Create a hugo post from the given content.

    Pops the body out of *content*, renders the remaining keys as TOML
    front matter, and writes the post under dest_dir/issue-<n>/<slug>.md.
    """
    body = content.pop("content")
    front_matter = pytoml.dumps(content)
    post = "+++\n{}+++\n\n{}\n".format(front_matter, body.strip())
    issue_dir = join(dest_dir, "issue-{}".format(content["issue"]))
    makedirs(issue_dir, exist_ok=True)
    post_path = join(issue_dir, "{}.md".format(slugify(content["title"])))
    with open(post_path, "w") as f:
        f.write(post)
def write_out_file(front_data, body_text, out_file_path):
    """Write a document with +++-delimited TOML front matter followed by the body."""
    lines = ['+++']
    lines += toml.dumps(front_data).splitlines()
    lines.append('+++')
    lines += body_text.splitlines()
    with open(out_file_path, 'w') as out_fh:
        out_fh.write('\n'.join(lines))
def dump_module(header, module):
    """Print the UPPERCASE attributes of *module* as a TOML table named *header*."""
    constants = {
        name: value
        for name, value in inspect.getmembers(module)
        if name.isupper()
    }
    print(pytoml.dumps({header: _clean_non_serializables(constants)}))
    print()
def dump_module(header: str, module: Any) -> None:
    """Dump *module*'s UPPERCASE attributes to stdout as a TOML table.

    The table is named *header*; non-serializable values are filtered out
    first via ``_clean_non_serializables``.
    """
    upper_attrs: dict = {}
    for attr_name, attr_value in inspect.getmembers(module):
        if attr_name.isupper():
            upper_attrs[attr_name] = attr_value
    cleaned = _clean_non_serializables(upper_attrs)
    print(pytoml.dumps({header: cleaned}))
    print()
def dumps(d):
    """dump dict object into toml stream

    Arguments:
        d (dict) : the dict which will be dumped

    Returns:
        string : the toml stream
    """
    return pytoml.dumps(d)
# CLI entry point for the SSH bastion: parse args, merge the TOML config file
# over the defaults (tolerating a missing file, exiting on unreadable or
# printing on --show-config), materialize Host/Group/Permission objects, then
# run the asyncssh server on the event loop until interrupted.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def main(): parser = argparse.ArgumentParser( description=f'SSH bastion server {__version__}') parser.add_argument('-c', '--config', default='~/.sshgateway/config.toml') parser.add_argument('--show-config', dest='show', action='store_true', default=False) parser.add_argument('-v', dest='verbose', action='count', default=1) args = parser.parse_args() config['verbose'] = args.verbose try: with open(expanduser(args.config)) as configfile: config.update(pytoml.loads(configfile.read())) except FileNotFoundError: print('Can not find configuration file. use default config') except PermissionError: print('Can not read configuration file.', file=sys.stderr) sys.exit(1) except pytoml.TomlError as e: print(f'Invalid configuration file: {e}', file=sys.stderr) if args.show: print(pytoml.dumps(config)) sys.exit(0) for host in config['hosts']: host.setdefault('proxy', None) try: config['hosts'] = [Host(**host) for host in config['hosts']] for host in config['hosts']: host_dict[host.name] = host config['groups'] = [Group(**group) for group in config['groups']] config['permissions'] = [ Permission(**perm) for perm in config['permissions'] ] except Exception as e: print(f'Invalid configuration file: {e}', file=sys.stderr) sys.exit(1) loop = asyncio.get_event_loop() try: loop.run_until_complete(start_server()) except (OSError, asyncssh.Error) as exc: sys.exit('Error starting server: ' + str(exc)) try: loop.run_forever() except KeyboardInterrupt: pass finally: loop.run_until_complete(loop.shutdown_asyncgens()) loop.close()
def add(index_path, key, value):
    """Add *key* -> *value* to the gpg-encrypted TOML index if absent.

    The index is decrypted, updated in memory, then re-encrypted in place.
    Raises CalledProcessError when gpg exits non-zero.
    """
    index = pytoml.loads(subprocess.check_output(decrypt + [index_path]))
    if key not in index:
        index[key] = value
    cmd = encrypt + ['--yes', '--output', index_path]
    with subprocess.Popen(cmd,
                          stdin=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True) as proc:
        stderr_bytes = proc.communicate(pytoml.dumps(index))[1].encode()
        if proc.wait() != 0:
            raise subprocess.CalledProcessError(proc.poll(), cmd, stderr_bytes)
def dumps(data, meta_format="yaml"):
    """Serialize *data* into a front-matter document.

    The metadata (everything but the 'body' key, which is popped) is rendered
    in *meta_format* (yaml/json/toml) between '---<format>' and '---'
    delimiters, followed by the body.

    BUGFIX: the closing delimiter and body were previously *assigned* to
    file_string instead of appended, discarding the metadata section that had
    just been built.
    """
    body = data.pop("body", "")
    file_string = "---" + meta_format + new_line
    if meta_format == "yaml":
        file_string += yaml.dump(data, default_flow_style=False)
    elif meta_format == "json":
        file_string += json.dumps(data)
    elif meta_format == "toml":
        file_string += toml.dumps(data)
    file_string += new_line + "---" + new_line + body
    return file_string
# Interactive project initialisation: confirm overwrite of an existing
# pyproject.toml, prompt for module/author/email/home page/license (with
# validation and remembered defaults), persist the answers as new defaults,
# write the LICENSE file, and render pyproject.toml from TEMPLATE.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def initialise(self): if (self.directory / 'pyproject.toml').exists(): resp = input("pyproject.toml exists - overwrite it? [y/N]: ") if (not resp) or resp[0].lower() != 'y': return module = self.prompt_text('Module name', self.guess_module_name(), str.isidentifier) author = self.prompt_text('Author', self.defaults.get('author'), lambda s: s != '') author_email = self.prompt_text('Author email', self.defaults.get('author_email'), self.validate_email) if 'home_page_template' in self.defaults: home_page_default = self.defaults['home_page_template'].replace( '{modulename}', module) else: home_page_default = None home_page = self.prompt_text( 'Home page', home_page_default, self.validate_homepage, retry_msg="Should start with http:// or https:// - try again.") license = self.prompt_options( 'Choose a license (see http://choosealicense.com/ for more info)', license_choices, self.defaults.get('license')) self.update_defaults(author=author, author_email=author_email, home_page=home_page, module=module, license=license) metadata = OrderedDict([ ('module', module), ('author', author), ]) if author_email: metadata['author-email'] = author_email if home_page: metadata['home-page'] = home_page if license != 'skip': metadata['classifiers'] = [license_names_to_classifiers[license]] self.write_license(license, author) with (self.directory / 'pyproject.toml').open('w', encoding='utf-8') as f: f.write(TEMPLATE.format(metadata=toml.dumps(metadata))) print() print( "Written pyproject.toml; edit that file to add optional extra info." )
def __str__(self):
    """Render nested config sections as TOML.

    BaseConfig attributes become TOML tables; numpy arrays inside them are
    converted to plain lists so pytoml can serialize them.
    """
    rendered = {}
    for section_name, section in six.iteritems(self.__dict__):
        if not isinstance(section, BaseConfig):
            rendered[section_name] = section
            continue
        rendered[section_name] = {
            key: (val.tolist() if isinstance(val, np.ndarray) else val)
            for key, val in six.iteritems(section.__dict__)
        }
    return toml.dumps(rendered)
def write_toml(path, contents, auto_generated_from=None,
               keys_to_convert_to_list=None, types_to_str=(enum.Enum, )):
    """Serialize *contents* to a TOML file at *path* and return the text.

    Values of the types in *types_to_str* are stringified in place, and the
    values under *keys_to_convert_to_list* are converted to lists, before
    dumping. When *auto_generated_from* is given, a do-not-edit banner is
    prepended.
    """
    convert_dict_item_type(contents, _type=types_to_str, func=str)
    if keys_to_convert_to_list:
        convert_dict_keys(contents, keys=keys_to_convert_to_list, func=list)
    rendered = pytoml.dumps(contents)
    if auto_generated_from:
        rendered = f"# NOTE: THIS FILE IS AUTO GENERATED BASED ON THE ANALYSIS OF {auto_generated_from}.\n# DO NOT MODIFY THIS FILE DIRECTLY.\n{rendered}"
    with open(path, 'w') as out_fh:
        out_fh.write(rendered)
    return rendered
def new(title):
    """Create a new article markdown file with TOML metadata front matter.

    Refuses to overwrite an existing article/<title>.md. The existence check
    is performed up front, and the file handle is managed with a context
    manager so it is closed even if the write fails (the original left the
    handle open on failure).
    """
    if os.path.isfile("article/%s.md" % title):
        print("Error: Article '%s' is existed." % title)
        return
    article_info = {
        "title": title,
        "create_time": datetime.datetime.now().isoformat("T")
    }
    article_default_text = "%s+++++++\n\n##1" % pytoml.dumps(article_info)
    with open("article/%s.md" % title, "wt") as file:
        file.write(article_default_text)
def claim(simulation):
    """Clearly not thread safe, but it'll do the trick."""
    s3 = boto3.resource('s3')
    key = os.path.join(os.path.basename(simulation['directory']),
                       'simulation.toml')
    try:
        existing = toml.loads(
            s3.Object('xcite-simulations', key).get()['Body'].read())
    except BotoClientError:
        # No remote record yet: claim the simulation by uploading ours.
        payload = io.BytesIO(toml.dumps(simulation).encode('utf-8'))
        s3.Object('xcite-simulations', key).put(Body=payload)
        return True
    else:
        # Already claimed: we own it only if the recorded server matches.
        return simulation['server'] == existing['server']
def serialize(obj, **options):
    """
    Serialize Python data to TOML.

    :param obj: the data structure to serialize.
    :param options: options given to lower pytoml module.
    """
    try:
        if "file_out" in options:
            # BUGFIX: pop file_out before forwarding the options, otherwise
            # toml.dump() receives the stream both positionally and as the
            # 'file_out' keyword and raises TypeError.
            file_out = options.pop("file_out")
            return toml.dump(obj, file_out, **options)
        else:
            return toml.dumps(obj, **options)
    except Exception as error:  # pylint: disable=broad-except
        raise SerializationError(error)
def serialize(obj, **options):
    '''
    Serialize Python data to TOML.

    :param obj: the data structure to serialize.
    :param options: options given to lower pytoml module.
    '''
    try:
        if 'file_out' in options:
            # BUGFIX: pop file_out before forwarding the options, otherwise
            # toml.dump() receives the stream both positionally and as the
            # 'file_out' keyword and raises TypeError.
            file_out = options.pop('file_out')
            return toml.dump(obj, file_out, **options)
        else:
            return toml.dumps(obj, **options)
    except Exception as error:
        raise SerializationError(error)
def encode_toml(data, ordered):
    """Encode *data* as TOML, translating pytoml's low-level exceptions into
    user-facing ValueErrors (non-dict input, binary input, or other type
    errors)."""
    try:
        return pytoml.dumps(data, sort_keys=not ordered)
    except AttributeError as e:
        if str(e) != "'list' object has no attribute 'keys'":
            raise e
        raise ValueError('Cannot convert non-dictionary data to '
                         'TOML; use "wrap" to wrap it in a '
                         'dictionary')
    except TypeError as e:
        if str(e) == "'in <string>' requires string as left operand, not int":
            raise ValueError('Cannot convert binary to TOML')
        raise ValueError('Cannot convert data to TOML ({0})'.format(e))
# Convert a flit.ini-style config into pyproject.toml: read [metadata] (list
# fields split on lines, entry-points-file redirected), carry over [scripts],
# merge any entry_points.txt groups, render via TEMPLATE, and tell the user
# which old files can now be removed.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def convert(path): cp = configparser.ConfigParser() with path.open(encoding='utf-8') as f: cp.read_file(f) ep_file = Path('entry_points.txt') metadata = OrderedDict() for name, value in cp['metadata'].items(): if name in metadata_list_fields: metadata[name] = [l for l in value.splitlines() if l.strip()] elif name == 'entry-points-file': ep_file = Path(value) else: metadata[name] = value if 'scripts' in cp: scripts = OrderedDict(cp['scripts']) else: scripts = {} entrypoints = CaseSensitiveConfigParser() if ep_file.is_file(): with ep_file.open(encoding='utf-8') as f: entrypoints.read_file(f) written_entrypoints = False with Path('pyproject.toml').open('w', encoding='utf-8') as f: f.write(TEMPLATE.format(metadata=pytoml.dumps(metadata))) if scripts: f.write('\n[tool.flit.scripts]\n') pytoml.dump(scripts, f) for groupname, group in entrypoints.items(): if not dict(group): continue if '.' in groupname: groupname = '"{}"'.format(groupname) f.write('\n[tool.flit.entrypoints.{}]\n'.format(groupname)) pytoml.dump(OrderedDict(group), f) written_entrypoints = True print("Written 'pyproject.toml'") files = str(path) if written_entrypoints: files += ' and ' + str(ep_file) print("Please check the new file, then remove", files)
def _update_entry_to_local_data(entry, local_files_data):
    """Insert or update *entry* in *local_files_data* and persist it as TOML.

    BUGFIX: membership was previously tested against
    ``local_files_data.items()`` (key/value tuples), which can never contain
    a bare name string, and the "update" branch wrote ``version`` and
    ``checksum`` onto the top-level dict instead of onto the matching entry.
    As a result every entry was always appended and never updated.
    """
    name = entry["name"]
    if name in local_files_data:
        # Update the existing record in place.
        local_files_data[name]["version"] = entry["version"]
        local_files_data[name]["checksum"] = entry["checksum"]
    else:
        local_files_data[name] = entry
    # Context manager guarantees the file is closed even if the write fails.
    with open(path_finder.get_local_version_data_path(), 'w') as f:
        f.write(toml.dumps(local_files_data))
        f.flush()
# Persist vSphere connection settings (hostname/username/password) as a TOML
# knob file so later tag reads can skip discovery. Validates every argument
# is a string (Python 2: basestring), creates the knob directory via a shell
# call if missing, and writes the settings with toml.dumps.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def writeVSphereSettings(knobName=DEFAULT_VSPHERE_KNOB, knobDirectory=DEFAULT_KNOB_DIRECTORY, hostname=None, username=None, password=None): ''' Write the vsphere host information to a knob file so we don't have to determine it every single time we read a tag. :param str knobName: What file name to write the vsphere data to :param str knobDirectory: What directory to write the knob file in :param str hostname: hostname of the vSphere host :param str user: username to connect to vSphere :param str password: password to connect to vSphere ''' assert isinstance(hostname, basestring), ("hostname must be a string but is %r" % hostname) assert isinstance( knobDirectory, basestring), ("knobDirectory must be a string but is %r" % knobDirectory) assert isinstance(knobName, basestring), ("knobName must be a string but is %r" % knobName) assert isinstance(password, basestring), ("password must be a string but is %r" % password) assert isinstance(username, basestring), ("username must be a string but is %r" % username) loadSharedLogger() knobPath = "%s/%s" % (knobDirectory, knobName) if not os.path.isdir(knobDirectory): this.logger.info( 'writeVsphereSettings: directory %s does not exist, creating it', knobDirectory) systemCall('mkdir -p %s' % knobDirectory) with open(knobPath, 'w') as knobFile: this.logger.info('Writing vSphere connection info to %s', knobPath) settings = {} settings['hostname'] = hostname settings['username'] = username settings['password'] = password knobFile.write(toml.dumps(settings))
# Sync virtualenvs from virtualenvs.toml with a tqdm progress bar: each env
# may pick its own python (default sys.executable, version captured via
# subprocess); envs whose recorded installed.toml matches are skipped,
# otherwise they are (re)created, packages installed (errors delegated to
# handle_error), binaries linked, and the config written back as text.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def main(filesystem, locator, link_dir, handle_error): with filesystem.open(locator.root.descendant("virtualenvs.toml")) as venvs: contents = pytoml.load( venvs, object_pairs_hook=collections.OrderedDict, ) progress = tqdm(contents["virtualenv"].items()) for name, config in progress: progress.set_description(name) python = config.pop("python", sys.executable) config.setdefault( "sys.version", subprocess.check_output( [python, "--version"], stderr=subprocess.STDOUT, ).decode('ascii'), ) virtualenv = locator.for_name(name=name) existing_config_path = virtualenv.path.descendant("installed.toml") try: with filesystem.open(existing_config_path) as existing_config: if pytoml.loads(existing_config.read()) == config: continue except FileNotFound: virtualenv.create(python=python) else: virtualenv.recreate_on(filesystem=filesystem, python=python) packages, requirements = _to_install(config=config) try: virtualenv.install(packages=packages, requirements=requirements) except Exception: handle_error(virtualenv) continue for link in config.get("link", []): _link( source=virtualenv.binary(name=link), to=link_dir.descendant(link), filesystem=filesystem, ) with filesystem.open(existing_config_path, "wt") as existing_config: existing_config.write(pytoml.dumps(config))
def _main():
    """Run the toml-test cases found under the current directory.

    For each .toml file (optionally filtered by argv), checks a dump/re-parse
    round trip and agreement with the matching .json benchmark. Returns 1 if
    anything failed, else 0.
    """
    succeeded = []
    failed = []

    for top, dirnames, fnames in os.walk('.'):
        for fname in fnames:
            if not fname.endswith('.toml'):
                continue
            if sys.argv[1:] and not any(arg in fname for arg in sys.argv[1:]):
                continue

            parse_error = None
            try:
                with open(os.path.join(top, fname), 'rb') as fin:
                    parsed = toml.load(fin)
            except toml.TomlError:
                parsed = None
                parse_error = sys.exc_info()
            else:
                dumped = toml.dumps(parsed)
                parsed2 = toml.loads(dumped)
                if parsed != parsed2:
                    # BUGFIX: append a 4-tuple; the reporting loop below
                    # unpacks (f, parsed, bench, e), so the previous 2-tuple
                    # raised ValueError whenever a round trip failed.
                    failed.append((fname, None, None, None))
                    continue

                with open(os.path.join(top, fname), 'rb') as fin:
                    parsed = toml.load(fin, translate=_testbench_literal)

            try:
                with io.open(os.path.join(top, fname[:-5] + '.json'), 'rt', encoding='utf-8') as fin:
                    bench = json.load(fin)
            except IOError:
                bench = None

            if parsed != bench:
                failed.append((fname, parsed, bench, parse_error))
            else:
                succeeded.append(fname)

    for f, parsed, bench, e in failed:
        print('failed: {}\n{}\n{}'.format(f, json.dumps(parsed, indent=4), json.dumps(bench, indent=4)))
        if e:
            traceback.print_exception(*e)
    print('succeeded: {}'.format(len(succeeded)))
    return 1 if failed else 0
def serialize_to_main_conf(keys: list, values: list):
    """Set each key in *keys* to the corresponding value in *values* inside
    main_config.toml, persist the file, and refresh the module-level
    ``main_conf_values`` cache.

    Improvements: index-based loop replaced with ``zip``, and both file
    handles are managed with ``with`` so they are closed on error.
    """
    print("Setting keys: ", keys, " to values: ", values)
    with open(os.path.join(config_path, "main_config.toml")) as conffile:
        config = toml.load(conffile)
    for key, value in zip(keys, values):
        config[key] = value
    global main_conf_values
    main_conf_values = config
    if config is not None:
        with open(os.path.join(config_path, "main_config.toml"), 'w') as f:
            f.write(toml.dumps(config))
            f.flush()
def _main():
    """Walk the current directory for .toml test cases and report how many
    parsed/round-tripped and matched their .json benchmarks."""
    succeeded = []
    failed = []

    for top, dirnames, fnames in os.walk('.'):
        for fname in fnames:
            if not fname.endswith('.toml'):
                continue

            try:
                with open(os.path.join(top, fname), 'rb') as fin:
                    parsed = toml.load(fin)
            except toml.TomlError:
                parsed = None
            else:
                # Round-trip check: dumping and re-parsing must be lossless.
                if parsed != toml.loads(toml.dumps(parsed)):
                    failed.append(fname)
                    continue

                with open(os.path.join(top, fname), 'rb') as fin:
                    parsed = toml.load(fin, _testbench_literal, _testbench_array)

            try:
                with io.open(os.path.join(top, fname[:-5] + '.json'), 'rt', encoding='utf-8') as fin:
                    bench = json.load(fin)
            except IOError:
                bench = None

            if parsed == bench:
                succeeded.append(fname)
            else:
                failed.append(fname)

    for name in failed:
        print('failed: {}'.format(name))
    print('succeeded: {}'.format(len(succeeded)))
    return 1 if failed else 0
# AYS post-install configuration for agentcontroller2: patch its TOML config
# with redis connection info (main + events handlers), point events at the
# local syncthing instance, write the config back, then set up jumpscript
# syncing by registering a md5-derived syncthing folder.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
def configure(self, service_obj): import pytoml """ this gets executed when files are installed this step is used to do configuration steps to the platform after this step the system will try to start the ays if anything needs to be started """ # for backwards compatibility base = '/opt/jumpscale7/apps/agentcontroller2' toml = '/opt/jumpscale7/apps/agentcontroller2/agentcontroller2.toml' cfg = pytoml.load(open(toml)) redis = service_obj.hrd.get('instance.param.redis.host') cfg['main']['redis_host'] = redis cfg['main']['redis_password'] = service_obj.hrd.get('instance.param.redis.password') # configure env var for events handlers redis_host, _, redis_port = redis.partition(':') cfg['events']['settings']['redis_address'] = redis_host cfg['events']['settings']['redis_port'] = redis_port cfg['events']['settings']['redis_password'] = service_obj.hrd.get('instance.param.redis.password') syncthing = j.atyourservice.get(name='syncthing') cfg['events']['settings']['syncthing_url'] = 'http://localhost:%s/' % syncthing.hrd.get('instance.param.port') content = pytoml.dumps(cfg) j.system.fs.writeFile(filename=toml, contents=content) # Start script syncing (syncthing) jumpscripts = j.system.fs.joinPaths(base, 'jumpscripts') j.system.fs.createDir(jumpscripts) syncthing_id = syncthing.actions.get_syncthing_id(syncthing) folderid = 'jumpscripts-%s' % hashlib.md5(syncthing_id).hexdigest() syncthing.actions.add_folder(syncthing, folderid, jumpscripts)
def _update_checksum_entry(entry, local_checksum_data):
    """Compute and record the md5 checksum and mtime for *entry*'s file.

    Updates (or inserts) the record in *local_checksum_data* and persists the
    whole mapping as TOML. Portrait files are skipped entirely (returns None),
    matching the original early return.

    Improvements: removed the dead ``checksum = "portrait"`` default that was
    unreachable past the early return, and wrapped the output file in a
    context manager so the handle is closed on error.
    """
    file_path = os.path.join(path_finder.get_nwn_path(), entry["target_dir"])
    file_path = os.path.join(file_path, entry["name"])
    print("Doing checksum for: ", entry["name"])
    # Skip generating a checksum for portraits, since there's so bloody many of them!
    if entry["target_dir"] == "portraits":
        return None
    checksum = _generate_file_md5(file_path)
    print("Checksum: ", checksum)
    modified_at = time.ctime(os.path.getmtime(file_path))
    print("Modified at: ", modified_at)
    checksum_entry = {
        "name": entry["name"],
        "checksum": checksum,
        "modified": modified_at,
    }
    if checksum_entry["name"] in local_checksum_data:
        # Existing record: refresh only the checksum (mtime intentionally
        # untouched, preserving original behavior).
        local_checksum_data[checksum_entry["name"]]["checksum"] = checksum_entry["checksum"]
    else:
        local_checksum_data[checksum_entry["name"]] = checksum_entry
    with open(path_finder.get_local_checksums_path(), 'w') as f:
        f.write(toml.dumps(local_checksum_data))
        f.flush()
    return checksum_entry
def get_dot_rmotr_as_toml(self):
    """Serialize this object's uuid and name as the .rmotr TOML payload."""
    payload = {
        'uuid': self.uuid,
        'name': self.name,
    }
    return toml.dumps(payload)
# Script body: split a YAML-front-matter article, rebuild its metadata as
# Hugo TOML front matter (reformatting the created date, tagging 'drafts'
# for later QC), and write the result under content/notes/.
# NOTE(review): collapsed one-line formatting preserved; only comments added.
frontmatter, article_contents = file_data.split('---') metadata = yaml.load(frontmatter) _, filename = os.path.split(arguments['<in_path>']) # Generate the output file. created_date = time.strptime(metadata['created'], '%B %d, %Y') formatted_date = time.strftime('%Y-%m-%d', created_date) output_metadata = { 'date': '%sT13:00:00-08:00' % formatted_date, 'location': '', 'tags': metadata['tags'], 'thumbnail': '', 'title': metadata['title'], } # Make sure a 'drafts' tag is present so we can do some QC later. if 'drafts' not in output_metadata['tags']: output_metadata['tags'].append('drafts') # Build up the file. output_contents = ''.join(( '+++\n', toml.dumps(output_metadata), '\n+++', article_contents)) # Save the output to the default location. out_path = os.path.join('content/notes', filename) with open(out_path, 'w') as out_file: out_file.write(output_contents)
def remarshal(input, output, input_format, output_format,
              wrap=None, unwrap=None, indent_json=None, yaml_options=None):
    """Convert data between JSON, TOML and YAML.

    Reads from *input* ('-' for stdin) in *input_format*, optionally unwraps
    or wraps the parsed data under a key, and writes it to *output* ('-' for
    stdout) in *output_format*.

    :raises ValueError: on unparseable input, unknown formats, or data that
        cannot be represented in the output format.
    :raises NotADirectoryError: when the output path's parent is missing.

    BUGFIX: ``yaml_options`` previously used a mutable default argument
    (``{}``); it now defaults to None and is normalized inside the function.
    """
    if yaml_options is None:
        yaml_options = {}
    try:
        if input == '-':
            input_file = getattr(sys.stdin, 'buffer', sys.stdin)
        else:
            input_file = open(input, 'rb')
        if output == '-':
            output_file = getattr(sys.stdout, 'buffer', sys.stdout)
        else:
            try:
                output_file = open(output, 'wb')
            except FileNotFoundError as e:
                # There should never be another reason for a FileNotFoundError
                # here than a missing parent directory.
                raise NotADirectoryError("Not a directory: '{0}'"
                                         .format(os.path.dirname(output)))

        input_data = input_file.read()
        if input_format == 'json':
            try:
                parsed = json.loads(input_data.decode('utf-8'))
            except JSONDecodeError as e:
                raise ValueError('Cannot parse as JSON ({0})'.format(e))
        elif input_format == 'toml':
            try:
                parsed = pytoml.loads(input_data)
            except pytoml.core.TomlError as e:
                raise ValueError('Cannot parse as TOML ({0})'.format(e))
        elif input_format == 'yaml':
            try:
                # SECURITY: yaml.load on external input can construct
                # arbitrary Python objects; consider yaml.safe_load unless
                # custom tags are required. Flagged, not changed, to preserve
                # existing behavior.
                parsed = yaml.load(input_data)
            except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e:
                raise ValueError('Cannot parse as YAML ({0})'.format(e))
        else:
            raise ValueError('Unknown input format: {0}'.format(input_format))

        if unwrap is not None:
            parsed = parsed[unwrap]
        if wrap is not None:
            temp = {}
            temp[wrap] = parsed
            parsed = temp

        if output_format == 'json':
            if indent_json is True:
                indent_json = 2
            if indent_json:
                separators = (',', ': ')
            else:
                separators = (',', ':')
            output_data = json.dumps(parsed,
                                     default=json_serialize,
                                     ensure_ascii=False,
                                     indent=indent_json,
                                     separators=separators,
                                     sort_keys=True) + "\n"
        elif output_format == 'toml':
            try:
                output_data = pytoml.dumps(parsed, sort_keys=True)
            except AttributeError as e:
                if str(e) == "'list' object has no attribute 'keys'":
                    raise ValueError('Cannot convert non-dictionary data to '
                                     'TOML; use "wrap" to wrap it in a '
                                     'dictionary')
                else:
                    raise e
        elif output_format == 'yaml':
            output_data = yaml.safe_dump(parsed,
                                         allow_unicode=True,
                                         default_flow_style=False,
                                         encoding=None,
                                         **yaml_options)
        else:
            raise ValueError('Unknown output format: {0}'.
                             format(output_format))

        output_file.write(output_data.encode('utf-8'))
        output_file.close()
    finally:
        if 'input_file' in locals():
            input_file.close()
        if 'output_file' in locals():
            output_file.close()
def to_toml(a, *args, **kw):
    '''Make verbose, human readable toml'''
    rendered = pytoml.dumps(a, **kw)
    return to_text(rendered)
def main():
    """CLI entry point: load configuration, set up logging, parse the
    command line, and dispatch to the selected sub-command."""
    # config: start from library defaults, then overlay the on-disk file
    default_conf = agilebot.AgileBot.default_conf()
    conf = copy(default_conf)
    # config file
    if os.path.isfile(CONFIG_PATH):
        with open(CONFIG_PATH, 'r') as f:
            toml_config = toml.load(f)
            conf = util.left_merge(default_conf, toml_config)
    # logging conf: the environment variable wins over the config file
    log_level = os.environ.get('AGILEBOT_LOG_LEVEL') or conf['logging']['level']
    logger.setLevel(log_level)
    logger.info('log level: {}'.format(log_level))
    # library logging config: silence noisy third-party loggers
    lib_log_level = logging.CRITICAL
    logging.getLogger('oauthlib').setLevel(lib_log_level)
    logging.getLogger('requests').setLevel(lib_log_level)
    logging.getLogger('requests_oauthlib').setLevel(lib_log_level)
    # command line args
    parser = argparse.ArgumentParser(
        description='Automate functions for Agile development sprints.')
    subparsers = parser.add_subparsers(help='sub-command help', dest='subparser_0')
    parser.add_argument('--conf', action='store_true', default=False,
                        help='print current configuration')
    # boards sub-command: each module registers its own sub-parser
    sub_commands = {
        'slack': cmd_slack.sub_command(subparsers),
        'sprint': cmd_sprint.sub_command(subparsers),
        'trello': cmd_trello.sub_command(subparsers)
    }
    # set defaults, ENV var first, then config file, then command line args
    # don't set func or func_help due to a bug in argparse that isn't fixed
    # in python 3.4 yet
    # http://bugs.python.org/issue9351
    parser.set_defaults(
        # func=None,
        # func_help=parser.print_help,
        agile_sprint_lists=get_first_value(
            os.environ.get('AGILE_SPRINT_LISTS'),
            conf['agile']['sprint_lists']
        ),
    )
    # parse the arguments
    args = parser.parse_args()
    sc_0 = getattr(args, 'subparser_0', '')
    sc_1 = getattr(args, 'subparser_1', '')
    logger.debug('subparser_0: {}'.format(sc_0))
    logger.debug('subparser_1: {}'.format(sc_1))
    # agile: fold the resolved sprint-lists value back into the config
    conf['agile']['sprint_lists'] = args.agile_sprint_lists
    if not len(sys.argv) > 1:
        # no arguments given, show help
        logger.debug('sys.argv: {}'.format(len(sys.argv)))
        parser.print_help()
    elif args.conf:
        # show current config
        logger.debug('printing current configuration')
        print(toml.dumps(conf, sort_keys=True))
    elif not getattr(args, 'func', None):
        # if the sub-command function is not set, show help and exit nonzero
        logger.debug('sub-command function not found for: {}'.format(str(sys.argv)))
        if hasattr(args, 'func_help'):
            logger.debug('show sub-command specific help')
            func_help = args.func_help
        else:
            logger.debug('show general help')
            func_help = parser.print_help
        func_help()
        logger.debug('args namespace: {}'.format(pformat(args)))
        logger.debug('main parser: {}'.format(pformat(parser)))
        sys.exit(1)
    else:
        # run the sub-command
        logger.debug('executing: agilebot {} {}'.format(sc_0, sc_1))
        args.func(args, conf)
def dump_cmd_options(options):
    """Print *options* as a TOML document under an 'options' table,
    followed by a blank line. Non-serializable entries are dropped."""
    payload = {'options': _clean_non_serializables(options)}
    print(pytoml.dumps(payload))
    print()
def dump_config(config):
    """Print *config* as a TOML document under a 'configs' table,
    followed by a blank line. Non-serializable entries are dropped."""
    cleaned = _clean_non_serializables(config)
    print(pytoml.dumps({'configs': cleaned}))
    print()
def dump_str(self, data):
    """Serialize *data* to a TOML-formatted string via toml.dumps."""
    return toml.dumps(data)
def remarshal(input, output, input_format, output_format,
              wrap=None, unwrap=None, indent_json=None, yaml_options=None,
              ordered=False):
    """Convert serialized data between JSON, TOML, and YAML.

    :param input: input file path, or '-' to read from stdin.
    :param output: output file path, or '-' to write to stdout.
    :param input_format: one of 'json', 'toml', 'yaml'.
    :param output_format: one of 'json', 'toml', 'yaml'.
    :param wrap: if given, wrap the parsed data in a dict under this key.
    :param unwrap: if given, extract this key from the parsed data first.
    :param indent_json: JSON indent width; True means 2, falsy means compact.
    :param yaml_options: extra keyword arguments passed to yaml.dump.
    :param ordered: preserve the input document's key order instead of
        sorting keys in the output.
    :raises ValueError: on an unknown format or unparseable input.
    """
    # A fresh dict per call; a mutable default argument would be shared
    # between calls and could leak options from one invocation to the next.
    if yaml_options is None:
        yaml_options = {}
    input_file = None
    output_file = None
    try:
        if input == '-':
            input_file = getattr(sys.stdin, 'buffer', sys.stdin)
        else:
            input_file = open(input, 'rb')

        if output == '-':
            output_file = getattr(sys.stdout, 'buffer', sys.stdout)
        else:
            output_file = open(output, 'wb')

        input_data = input_file.read()

        if input_format == 'json':
            try:
                pairs_hook = OrderedDict if ordered else dict
                parsed = json.loads(input_data.decode('utf-8'),
                                    object_pairs_hook=pairs_hook)
            except JSONDecodeError as e:
                raise ValueError('Cannot parse as JSON ({0})'.format(e))
        elif input_format == 'toml':
            try:
                pairs_hook = OrderedDict if ordered else dict
                parsed = pytoml.loads(input_data, object_pairs_hook=pairs_hook)
            except pytoml.core.TomlError as e:
                raise ValueError('Cannot parse as TOML ({0})'.format(e))
        elif input_format == 'yaml':
            try:
                loader = OrderedLoader if ordered else TimezoneLoader
                parsed = yaml.load(input_data, loader)
            except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e:
                raise ValueError('Cannot parse as YAML ({0})'.format(e))
        else:
            raise ValueError('Unknown input format: {0}'.format(input_format))

        if unwrap is not None:
            parsed = parsed[unwrap]
        if wrap is not None:
            parsed = {wrap: parsed}

        if output_format == 'json':
            if indent_json is True:
                indent_json = 2
            # A spaced item separator only makes sense for indented output.
            if indent_json:
                separators = (',', ': ')
            else:
                separators = (',', ':')
            output_data = json.dumps(parsed,
                                     default=json_serialize,
                                     ensure_ascii=False,
                                     indent=indent_json,
                                     separators=separators,
                                     sort_keys=not ordered) + "\n"
        elif output_format == 'toml':
            try:
                output_data = pytoml.dumps(parsed, sort_keys=not ordered)
            except AttributeError as e:
                # pytoml raises this exact AttributeError for top-level lists.
                if str(e) == "'list' object has no attribute 'keys'":
                    raise ValueError('Cannot convert non-dictionary data to '
                                     'TOML; use "wrap" to wrap it in a '
                                     'dictionary')
                else:
                    raise e
        elif output_format == 'yaml':
            dumper = OrderedDumper if ordered else yaml.SafeDumper
            output_data = yaml.dump(parsed,
                                    None,
                                    dumper,
                                    allow_unicode=True,
                                    default_flow_style=False,
                                    encoding=None,
                                    **yaml_options)
        else:
            raise ValueError('Unknown output format: {0}'.
                             format(output_format))

        output_file.write(output_data.encode('utf-8'))
    finally:
        # Close only files this function opened; never close the process's
        # stdin/stdout streams (the original closed them when '-' was used).
        if input_file is not None and input != '-':
            input_file.close()
        if output_file is not None and output != '-':
            output_file.close()
def generate_lesson_dot_rmotr_file(name, _type, uuid=None):
    """Return the TOML contents of a lesson's .rmotr metadata file.

    A fresh UUID4 is generated when *uuid* is not supplied (or falsy).
    """
    identifier = uuid if uuid else uuid_module.uuid4()
    metadata = {
        'uuid': str(identifier),
        'name': name,
        'type': _type,
    }
    return toml.dumps(metadata)
def generate_unit_dot_rmotr_file(name, uuid=None):
    """Return the TOML contents of a unit's .rmotr metadata file.

    A fresh UUID4 is generated when *uuid* is not supplied (or falsy).
    """
    identifier = uuid if uuid else uuid_module.uuid4()
    metadata = {
        'uuid': str(identifier),
        'name': name,
    }
    return toml.dumps(metadata)