def parse_config(config, schema=None, keys=()):
    """Validate *config* against *schema* and return the parsed dict.

    Schema values double as defaults and type exemplars: a missing key gets
    the schema's bool/int default or an empty instance of the schema type;
    a present key must match the schema value's type. Nested dicts recurse,
    with *keys* tracking the path for error messages.

    :raises ValueError: on a type mismatch or on unknown options.
    """
    config = dict(config)
    if schema is None:
        # Fall back to the bundled sample configuration as the schema.
        schema = toml.loads(SAMPLE_CONFIG)
    parsed = {}
    for key, value in schema.items():
        if key not in config:
            if isinstance(value, (bool, int)):
                # Numeric/boolean schema entries double as defaults.
                parsed[key] = value
            else:
                # Other types default to their empty value ('', [], {}, ...).
                parsed[key] = type(value)()
        elif isinstance(config[key], type(value)):
            if isinstance(value, dict):
                # Recurse into sub-tables, extending the key path for errors.
                parsed[key] = parse_config(config.pop(key), value,
                                           keys=keys + (key, ))
            else:
                parsed[key] = config.pop(key)
        else:
            # Bug fix: report the offending value from the user's config,
            # not the schema default `value`.
            raise ValueError("in {!r}: invalid value {!r}".format(
                list(keys + (key, )), config[key]))
    if config:
        # Anything left over was not declared in the schema.
        raise ValueError("in {!r}: unknown options {!r}".format(
            list(keys), sorted(config.keys())))
    return parsed
def load_toml(name, ctx, **parsed):
    """Load TOML from a file or a command's stdout and merge it into ctx."""
    logger = pick(ctx, 'logger.local')

    is_valid, res = validate_vars(**parsed)
    if not is_valid:
        # Surface validation failures with the source location when known.
        raise YaggySyntaxError(parsed.get('relpath', 'unknown'),
                               parsed.get('linenum', -1),
                               res)

    if 'to_load' in res:
        # The TOML document comes from a file on disk.
        with open(res['to_load'], 'rt', encoding='utf-8') as fobj:
            raw = fobj.read()
        msg = f'# [{name.upper()}] "%(args)s" file loaded'
    else:
        # The TOML document comes from the stdout of an external command.
        proc = subprocess.run(res['to_exec'], capture_output=True,
                              encoding='utf-8')
        raw = proc.stdout
        msg = f'# [{name.upper()}] "%(args)s" command executed and loaded'

    # Merge the freshly parsed data over whatever is already under `name`.
    ctx[name] = mergedict(pick(ctx, name), qtoml.loads(raw))
    logger.info(msg, parsed)
def test_encode_none():
    """None is unencodable by default; encode_none substitutes a value."""
    data = {'a': None}
    with pytest.raises(qtoml.encoder.TOMLEncodeError):
        qtoml.dumps(data)
    encoded = qtoml.dumps(data, encode_none=0)
    assert qtoml.loads(encoded)['a'] == 0
def test_valid_decode(valid_case):
    """Decoding the TOML form must match the (untagged) JSON form."""
    with assert_no_output():
        expected = untag(json.loads(valid_case['json']))
        actual = qtoml.loads(valid_case['toml'])
        # Some cases contain NaN floats, which compare unequal to
        # themselves; normalise them before comparing the structures.
        assert patch_floats(actual) == patch_floats(expected)
def get_config(infile):
    """Read a config file, parsing as TOML for *.toml, JSON otherwise.

    JSON is decoded into an OrderedDict to preserve key order.
    """
    path = Path(infile)
    with open(path, 'r') as handle:
        text = handle.read()
    if path.suffix == ".toml":
        return toml.loads(text)
    return json.loads(text, object_pairs_hook=OrderedDict)
def generate_config():
    """Interactively build a bot configuration from example.toml.

    Prompts for every setting under [Bot] and for the modules to enable,
    then returns the updated configuration serialised as TOML.

    :raises ValueError: when a numeric setting receives non-digit input.
    """
    # TODO ensure this works still
    config = qtoml.loads(Path("example.toml").read_text())
    botsettings = config["Bot"]
    for each in botsettings.keys():
        if type(botsettings[each]) is bool:
            botsettings[each] = prompt(
                f"Would you like to enable {each}? y/N ")
        elif type(botsettings[each]) is int:
            numerical_option = input(f"Please enter a number for {each}: ")
            if not numerical_option.isdigit():
                # Raise instead of assert: asserts vanish under `python -O`.
                raise ValueError("Must be a number!")
            botsettings[each] = int(numerical_option)
        else:
            botsettings[each] = input(f"Please enter your {each}: ")
    modules = getmodules()
    print(
        "Enter the number for each module you'd like to enable, separated by commas"
    )
    print("Example: 1,5,8")
    message = ", ".join([f"{i}) {v}" for i, v in enumerate(modules)])
    to_enable = input(f"{message}\n")
    for module_index in to_enable.split(","):
        # Bug fix: indices are 0-based, so a valid index is < len(modules);
        # the old `<= len(modules)` check allowed an IndexError on the
        # one-past-the-end index.
        if module_index.isdigit() and int(module_index) < len(modules):
            config["Modules"]["enabled"].append(modules[int(module_index)])
    return qtoml.dumps(config)
def test_encode_subclass():
    """dict subclasses encode like dicts, preserving insertion order."""
    src = OrderedDict(a=1, b=2, c=3, d=4, e=5)
    encoded = qtoml.dumps(src)
    # Insertion order must survive encoding.
    assert encoded == 'a = 1\nb = 2\nc = 3\nd = 4\ne = 5\n'
    # The decoded value is a plain dict, so this comparison is
    # order-insensitive by nature.
    assert src == qtoml.loads(encoded)
def run(run_count=5000):
    """Benchmark each TOML parser *run_count* times against data.toml."""
    with open('data.toml', 'r', encoding='utf-8') as handle:
        test_data = handle.read()
    print(f'Parsing data.toml {run_count} times:')
    # pytomlpp establishes the baseline the other parsers are compared to.
    baseline = benchmark('pytomlpp', run_count,
                         lambda: pytomlpp.loads(test_data))
    contenders = [
        ('tomli', lambda: tomli.loads(test_data)),
        ('toml', lambda: toml.loads(test_data)),
        ('qtoml', lambda: qtoml.loads(test_data)),
        ('tomlkit', lambda: tomlkit.parse(test_data)),
    ]
    for label, parse in contenders:
        benchmark(label, run_count, parse, compare_to=baseline)
def parse_toml(self, path, prefix=None):
    """
    Convenience method for applying parse_dict to a toml file.
    """
    try:
        with open(path) as handle:
            return self.parse_dict(toml.loads(handle.read()), prefix=prefix)
    except SystemExit:
        # Add the file being parsed to the error report, then let the
        # exit propagate.
        print("%s: error: while parsing %r"
              % (os.path.basename(sys.argv[0]), path),
              file=sys.stderr)
        raise
def test_non_str_keys():
    """Non-string keys fail by default but can be coerced via default()."""
    data = {1: 'foo'}
    with pytest.raises(qtoml.TOMLEncodeError):
        qtoml.dumps(data)

    class EnsureStringKeys(qtoml.TOMLEncoder):
        def default(self, o):
            if isinstance(o, dict):
                # Coerce every key to str before re-encoding.
                return {str(key): val for key, val in o.items()}
            return super().default(o)

    encoded = EnsureStringKeys().encode(data)
    assert qtoml.loads(encoded) == {'1': 'foo'}
def test_encode_path():
    """A custom encoder default() can serialise pathlib.Path values."""
    class PathEncoder(qtoml.TOMLEncoder):
        def default(self, obj):
            if isinstance(obj, Path):
                return obj.as_posix()
            return super().default(obj)

    with_path = {'top': {'path': Path("foo") / "bar"}}
    with_str = {'top': {'path': "foo/bar"}}
    direct = PathEncoder().encode(with_path)
    via_dumps = qtoml.dumps(with_path, cls=PathEncoder)
    # Direct encoder use and the cls= shortcut must agree.
    assert direct == via_dumps
    assert qtoml.loads(direct) == {'top': {'path': 'foo/bar'}}
    # Encoding the pre-stringified dict yields the same document.
    assert direct == qtoml.dumps(with_str, cls=PathEncoder)
def test_encode_default():
    """default() hooks let the encoder handle UserDict instances."""
    data = UserDict({'a': 10, 'b': 20})
    with pytest.raises(qtoml.TOMLEncodeError):
        qtoml.dumps(data)

    class UserDictEncoder(qtoml.TOMLEncoder):
        def default(self, obj):
            if isinstance(obj, UserDict):
                return obj.data
            # The parent implementation always raises for unknown types.
            return super().default(obj)

    direct = UserDictEncoder().encode(data)
    # Direct encoder use and the cls= shortcut must agree.
    assert direct == qtoml.dumps(data, cls=UserDictEncoder)
    assert qtoml.loads(direct) == data.data
def __init__(self, *, configfile=None, config=None, act_time=None):
    """Initialise from a TOML/JSON config file or a ready-made dict."""
    if configfile is not None:
        configfile = Path(configfile)
        # Renamed the file handle so it no longer shadows the `config`
        # parameter.
        with open(configfile, 'r') as handle:
            if configfile.suffix == ".toml":
                self.config = toml.loads(handle.read())  # _dict=OrderedDict)
                self.type = "toml"
            else:
                self.config = json.load(handle,
                                        object_pairs_hook=OrderedDict)
                self.type = "json"
    elif config is not None:
        self.config = config
    else:
        raise ValueError(
            "you should pass either a configfile or a config dict")
    # act_time doubles as the base of the instance name.
    self.act_time = self.name = act_time or ""
    if "type" in self.config:
        self.name += "_" + self.config["type"]
def read_from_file(cls, filename, name_to_json_class_index_extra=None, **kwargs):
    """
    :param filename: the file which should be read.
    :param name_to_json_class_index_extra: An optional dictionary mapping names to class objects. For example: {"Panchangam": annual.Panchangam}
    :return: the object deserialised from the file.
    """
    if name_to_json_class_index_extra is not None:
        # NOTE: mutates the module-level registry for all later calls.
        json_class_index.update(name_to_json_class_index_extra)
    try:
        with open(filename) as fhandle:
            # Renamed from `format` to avoid shadowing the builtin.
            fmt = file_helper.deduce_format_from_filename(filename)
            data = fhandle.read()
            if "json" in fmt:
                input_dict = jsonpickle.decode(data)
            elif "toml" in fmt:
                try:
                    input_dict = toml.loads(data)  # Many bugs above.
                except TomlDecodeError:
                    # Fall back to the alternative qtoml parser.
                    import qtoml
                    input_dict = qtoml.loads(data)
            obj = cls.make_from_dict(input_dict=input_dict, **kwargs)
            obj.post_load_ops()
            return obj
    except Exception:
        # Fallback: the file may hold a list of dicts instead of one dict.
        try:
            import traceback
            traceback.print_exc()
            logging.info("Could not load as a dict. May be a list of dicts. Trying..")
            with open(filename) as fhandle:
                obj = cls.make_from_dict_list(jsonpickle.decode(fhandle.read()))
                return obj
        except Exception as e:
            # Bug fix: the old message called "...".format(e) on a string
            # with no placeholder, silently dropping the error details.
            logging.error("Error reading %s : %s", filename, e)
            raise e
def clone_repo(project):
    """Clone *project* from GitHub into a temp dir and parse features.toml.

    :param project: GitHub "owner/repo" path of the repository to clone.
    :return: the parsed TOML data, or None when features.toml is absent.

    Side effect: chdir()s into the freshly created temp directory.
    """
    print("### Grabbing features.toml ###")
    tempdir = tempfile.mkdtemp()
    os.chdir(tempdir)
    print(tempdir)
    # Security fix: pass argv as a list with shell=False so a crafted
    # `project` value cannot inject shell metacharacters.
    pid = subprocess.Popen(
        ['git', 'clone', '-q', '[email protected]:' + project + '.git', '.'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    for line in pid.stdout.readlines():
        print(line)
    pid.wait()
    if (not os.path.exists("features.toml")):
        print("features.toml not found")
        return None
    with open("features.toml", "r") as fin:
        return qtoml.loads(fin.read())
def main():
    """Main entry point <Chart Directory> <Override Values Toml>"""
    # NOTE(review): the bare `"******"` fragments below appear to be secrets
    # redacted by a scrubbing tool; they are not valid Python as written and
    # must be restored from the original source before this can run.
    newvals = {}
    releasename = sys.argv[1]
    chart = sys.argv[2]
    if (not releasename or not chart):
        return 0
    # Helm values overrides live under override/<chart>/values.yaml.
    chartvalues = "override/" + sys.argv[2] + "/values.yaml"
    rspfile = sys.argv[3]
    if (';' in releasename):
        releasename = releasename.split(';')[0]
    # Normalise the release name to helm-friendly characters.
    releasename = releasename.replace(';', '-v').replace('_', '-').lower()
    if (os.path.exists(chartvalues)):
        # Seed newvals with any pre-existing override values.
        stream = open(chartvalues, 'r')
        values = yaml.load(stream)
        newvals.update(values)
        stream.close()
    print("RSP=" + rspfile)
    # Read the .rsp file of KEY=VALUE lines.
    lines = subprocess.run(
        ['cat', rspfile], check=False,
        stdout=subprocess.PIPE).stdout.decode('utf-8').split("\n")
    cleanvals = {}
    for line in lines:
        if ('=' in line):
            key = line.split('=')[0]
            value = line.split('=', 1)[-1].strip()
            # Unquote boolean literals so TOML parses them as booleans.
            if (value.lower() == '"true"' or value.lower() == "'true'"):
                value = "true"
            if (value.lower() == '"false"' or value.lower() == "'false'"):
                value = "false"
            # Keys containing '?' are dropped — presumably placeholders.
            if ('?' not in key):
                cleanvals[key] = value
    # Re-serialise the cleaned pairs as TOML to obtain typed values.
    valstr = ""
    for key, value in cleanvals.items():
        valstr = valstr + key + " = " + value + "\n"
    values = qtoml.loads(valstr)
    newvals.update(values)
    if (os.path.exists(chartvalues)):
        os.rename(chartvalues, chartvalues + ".bak")
    if not os.path.exists(os.path.dirname(chartvalues)):
        os.makedirs(os.path.dirname(chartvalues))
    # Persist the merged values as the chart's values.yaml.
    stream = open(chartvalues, "w")
    yaml.dump(newvals, stream)
    stream.close()
    override = os.path.abspath(chartvalues)
    timestamp = int(time.time() * 1000.0)
    to_dir = "/tmp/dh" + str(timestamp)
    from_dir = os.getcwd()
    tempdir = tempfile.mkdtemp()
    os.chdir(tempdir)
    localconnection = ""
    if ('server_hostname' in newvals):
        hname = newvals['server_hostname']
        newvals['server_ip'] = socket.gethostbyname(hname)
        if (hname == "localhost"):
            newvals['server_ip'] = "127.0.0.1"
            localconnection = " --connection=local"
    # Build the ansible group variables file.
    os.mkdir('group_vars')
    fp_all = open('group_vars/all.yml', 'w')
    fp_all.write("---\n")
    if ('sshuser' in newvals):
        fp_all.write("ansible_user: "******"\n")
    if ('sshpass' in newvals):
        fp_all.write("ansible_password: "******"\n")
    if ('sshkeyfile' in newvals):
        fp_all.write("ansible_ssh_private_key_file: " +
                     newvals['sshkeyfile'] + "\n")
    if ('suuser' in newvals):
        fp_all.write("ansible_become: yes\n")
        fp_all.write("ansible_become_method: su\n")
        fp_all.write("ansible_become_user: "******"\n")
    if ('supass' in newvals):
        fp_all.write("ansible_become_pass: "******"\n")
    fp_all.write("validate_certs: false\n")
    yaml.dump(newvals, fp_all)
    fp_all.close()
    # Generate the playbook that ships the chart and runs the deploy steps.
    fp_task = open('runit.yml', 'w')
    fp_task.write("---\n")
    fp_task.write("- name: Transfer File\n")
    fp_task.write(" hosts: all\n")
    fp_task.write(" gather_facts: no\n")
    fp_task.write("\n")
    fp_task.write(" tasks:\n")
    fp_task.write(" - name: Make Dir\n")
    fp_task.write(" file:\n")
    fp_task.write(" path: " + to_dir + "\n")
    fp_task.write(" state: directory\n")
    fp_task.write("\n")
    fp_task.write(" - name: Copy File\n")
    fp_task.write(" copy:\n")
    fp_task.write(" src: " + from_dir + "/\n")
    fp_task.write(" dest: " + to_dir + "\n")
    fp_task.write("\n")
    # pprint(newvals)
    if ('gcloud' in newvals):
        # Configure gcloud and fetch GKE credentials when requested.
        if ('compute' in newvals['gcloud']
                and 'zone' in newvals['gcloud']['compute']):
            runcmd(
                fp_task, to_dir, 'gcloud config set compute/zone ' +
                newvals['gcloud']['compute']['zone'])
        if ('container' in newvals['gcloud']
                and 'cluster' in newvals['gcloud']['container']):
            runcmd(
                fp_task, to_dir, 'gcloud config set container/cluster ' +
                newvals['gcloud']['container']['cluster'])
        if ('core' in newvals['gcloud']
                and 'account' in newvals['gcloud']['core']):
            runcmd(
                fp_task, to_dir, 'gcloud config set core/account ' +
                newvals['gcloud']['core']['account'])
        if ('core' in newvals['gcloud']
                and 'disable_usage_reporting' in newvals['gcloud']['core']):
            runcmd(
                fp_task, to_dir,
                'gcloud config set core/disable_usage_reporting ' +
                newvals['gcloud']['core']['disable_usage_reporting'])
        if ('core' in newvals['gcloud']
                and 'project' in newvals['gcloud']['core']):
            runcmd(
                fp_task, to_dir, 'gcloud config set core/project ' +
                newvals['gcloud']['core']['project'])
        if ('oauth' in newvals['gcloud']
                and 'account' in newvals['gcloud']['oauth']):
            runcmd(
                fp_task, to_dir, 'gcloud auth activate-service-account ' +
                newvals['gcloud']['oauth']['account'] + ' --key-file=' +
                newvals['gcloud']['oauth']['keyfile'])
        if ('container' in newvals['gcloud']
                and 'cluster' in newvals['gcloud']['container']):
            runcmd(
                fp_task, to_dir,
                'gcloud container clusters get-credentials ' +
                newvals['gcloud']['container']['cluster'])
    if ('eks' in newvals):
        # AWS EKS credentials and kubeconfig.
        if ('aws_access_key_id' in newvals['eks']):
            runcmd(
                fp_task, to_dir,
                'aws --profile default configure set aws_access_key_id ' +
                newvals['eks']['aws_access_key_id'])
        if ('aws_secret_access_key' in newvals['eks']):
            runcmd(
                fp_task, to_dir,
                'aws --profile default configure set aws_secret_access_key ' +
                newvals['eks']['aws_secret_access_key'])
        if ('cluster' in newvals['eks'] and 'region' in newvals['eks']):
            region = newvals['eks']['region']
            cluster = newvals['eks']['cluster']
            optional = newvals['eks'].get('optional', '')
            runcmd(
                fp_task, to_dir, 'aws eks --region ' + region +
                ' update-kubeconfig --name ' + cluster + ' ' + optional)
    if ('aks' in newvals):
        # Azure AKS service-principal login and credentials.
        if ('serviceprincipal' in newvals['aks']
                and 'certificate' in newvals['aks']
                and 'tenant' in newvals['aks']):
            serviceprincipal = newvals['aks']['serviceprincipal']
            certificate = newvals['aks']['certificate']
            tenant = newvals['aks']['tenant']
            runcmd(
                fp_task, to_dir, 'az login --service-principal -u ' +
                serviceprincipal + ' -p ' + certificate + ' --tenant ' +
                tenant)
        if ('cluster' in newvals['aks']
                and 'resourcegroup' in newvals['aks']):
            resourcegroup = newvals['aks']['resourcegroup']
            cluster = newvals['aks']['cluster']
            runcmd(
                fp_task, to_dir,
                'az aks get-credentials --resource-group ' + resourcegroup +
                ' --name ' + cluster)
    helm_exe = newvals.get("helm_exe", "helm")
    if (helm_exe.lower() == "helm2"):
        # helm 2 requires a client-side init before use.
        runcmd(fp_task, to_dir, helm_exe + ' init --client-only --upgrade')
    # Echo the values file with secret-looking lines filtered out.
    runcmd(
        fp_task, to_dir, 'cat ' + to_dir + "/" +
        chartvalues + " | grep -v pass | grep -v ssh | grep -v aws_access_key_id | grep -v aws_secret_access_key | grep -v serviceprincipal | grep -v tenant"
    )
    if ('helmrepo' in newvals and 'url' in newvals['helmrepo']):
        # NOTE(review): "******" looks like redaction residue — presumably "".
        mylogin = "******"
        if ('username' in newvals['helmrepo']):
            mylogin = mylogin + "--username " + newvals['helmrepo'][
                'username'] + " "
        if ('password' in newvals['helmrepo']):
            mylogin = mylogin + "--password " + newvals['helmrepo'][
                'password'] + " "
        if ('helmrepouser' in newvals):
            mylogin = mylogin + "--username " + newvals['helmrepouser'] + " "
        if ('helmrepopass' in newvals):
            mylogin = mylogin + "--password " + newvals['helmrepopass'] + " "
        runcmd(
            fp_task, to_dir, helm_exe + ' repo add ' + mylogin +
            newvals['helmrepo']['name'] + " " + newvals['helmrepo']['url'])
        runcmd(fp_task, to_dir, helm_exe + ' repo update')
    version = newvals.get('chartversion', 'latest')
    namespace = ""
    if ('chartnamespace' in newvals):
        namespace = '--namespace "' + newvals['chartnamespace'] + '"'
    helmopts = newvals.get('helmopts', '')
    helmextract = newvals.get('helmextract', None)
    if (helmextract is not None):
        # Pull and untar the chart locally before upgrading.
        runcmd(
            fp_task, to_dir, helm_exe + ' pull "' + chart + '" --version "' +
            version + '" --untar')
    runcmd(
        fp_task, to_dir, helm_exe + ' upgrade "' + releasename + '" "' +
        chart + '" --version "' + version + '" ' + namespace + ' ' +
        helmopts + ' --install -f ' + chartvalues)
    fp_task.close()
    if (os.path.exists(rspfile)):
        os.remove(rspfile)
    my_env = os.environ.copy()
    my_env['ANSIBLE_STDOUT_CALLBACK'] = 'yaml'
    # Run the generated playbook against the target host.
    pid = subprocess.Popen(
        'ansible-playbook runit.yml --ssh-common-args="-o StrictHostKeyChecking=no" '
        + localconnection + ' -i ' + newvals['server_hostname'] + ',',
        env=my_env,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    for line in pid.stdout.readlines():
        line = line.decode('utf-8').strip('\n')
        # Avoid echoing credential-bearing lines to the console.
        if ('ssh' not in line and 'pass' not in line):
            print(line)
    pid.wait()
    upload_helm(override, chartvalues, newvals)
    os.chdir('/tmp')
    if ('debug_ansible' in newvals):
        print(tempdir)
    else:
        shutil.rmtree(tempdir)
    exit(pid.returncode)
def test_string_encode(data):
    """A string value survives a dump/load round trip unchanged."""
    document = {'key': data}
    round_tripped = qtoml.loads(qtoml.dumps(document))
    assert round_tripped == document
def test_circular_encode(data):
    """Arbitrary data survives a dump/load round trip (NaN-normalised)."""
    reloaded = qtoml.loads(qtoml.dumps(data))
    assert patch_floats(reloaded) == patch_floats(data)
def test_valid_encode(valid_case):
    """Encoding the JSON form and decoding again matches the original."""
    expected = untag(json.loads(valid_case['json']))
    reloaded = qtoml.loads(qtoml.dumps(expected))
    # NaN floats compare unequal to themselves; normalise before comparing.
    assert patch_floats(reloaded) == patch_floats(expected)
def test_invalid_decode(invalid_decode_case):
    """Malformed TOML input must raise TOMLDecodeError."""
    bad_toml = invalid_decode_case['toml']
    with pytest.raises(qtoml.decoder.TOMLDecodeError):
        qtoml.loads(bad_toml)
def main():
    """Main entry point <Function Directory> <Override Values Toml>"""
    # NOTE(review): the bare `"******"` fragments below appear to be secrets
    # redacted by a scrubbing tool; they are not valid Python as written and
    # must be restored from the original source before this can run.
    newvals = {}
    rspfile = sys.argv[1]
    print("RSP=" + rspfile)
    # Read the .rsp file of KEY=VALUE lines.
    lines = subprocess.run(
        ['cat', rspfile], check=False,
        stdout=subprocess.PIPE).stdout.decode('utf-8').split("\n")
    cleanvals = {}
    for line in lines:
        if ('=' in line):
            key = line.split('=')[0]
            value = line.split('=', 1)[-1].strip()
            # Unquote boolean literals so TOML parses them as booleans.
            if (value.lower() == '"true"' or value.lower() == "'true'"):
                value = "true"
            if (value.lower() == '"false"' or value.lower() == "'false'"):
                value = "false"
            # Keys containing '?' are dropped — presumably placeholders.
            if ('?' not in key):
                cleanvals[key] = value
    # Re-serialise the cleaned pairs as TOML to obtain typed values.
    valstr = ""
    for key, value in cleanvals.items():
        valstr = valstr + key + " = " + value + "\n"
    values = qtoml.loads(valstr)
    newvals.update(values)
    timestamp = int(time.time() * 1000.0)
    to_dir = "/tmp/dh" + str(timestamp)
    # from_dir = os.getcwd()
    tempdir = tempfile.mkdtemp()
    os.chdir(tempdir)
    if ('lambda' not in newvals):
        print("lamba variables not defined\n")
        return
    funcname = newvals['lambda'].get('functionname', None)
    if (funcname is not None):
        funcname = funcname.strip()
    funcfile = newvals['lambda'].get('functionfile', None)
    # NOTE(review): Path(funcfile) raises TypeError when funcfile is None;
    # this line probably belongs inside the `if funcfile is not None` branch
    # below — confirm against callers before relying on the None path.
    arcname = Path(funcfile).name
    if (funcfile is not None):
        # Package the function file into a zip for update-function-code.
        with ZipFile(tempdir + "/" + funcname + '.zip', 'w') as myzip:
            myzip.write(filename=funcfile, arcname=arcname)
        funcfile = "--zip-file fileb://" + to_dir + "/" + funcname + '.zip'
    else:
        funcfile = ""
    localconnection = ""
    if ('server_hostname' in newvals):
        hname = newvals['server_hostname']
        newvals['server_ip'] = socket.gethostbyname(hname)
        if (hname == "localhost"):
            newvals['server_ip'] = "127.0.0.1"
            localconnection = " --connection=local"
    # Build the ansible group variables file.
    os.mkdir('group_vars')
    fp_all = open('group_vars/all.yml', 'w')
    fp_all.write("---\n")
    if ('sshuser' in newvals):
        fp_all.write("ansible_user: "******"\n")
    if ('sshpass' in newvals):
        fp_all.write("ansible_password: "******"\n")
    if ('sshkeyfile' in newvals):
        fp_all.write("ansible_ssh_private_key_file: " +
                     newvals['sshkeyfile'] + "\n")
    if ('suuser' in newvals):
        fp_all.write("ansible_become: yes\n")
        fp_all.write("ansible_become_method: su\n")
        fp_all.write("ansible_become_user: "******"\n")
    if ('supass' in newvals):
        fp_all.write("ansible_become_pass: "******"\n")
    fp_all.write("validate_certs: false\n")
    yaml.dump(newvals, fp_all)
    fp_all.close()
    # Generate the playbook that ships the zip and runs the AWS CLI.
    fp_task = open('runit.yml', 'w')
    fp_task.write("---\n")
    fp_task.write("- name: Transfer File\n")
    fp_task.write(" hosts: all\n")
    fp_task.write(" gather_facts: no\n")
    fp_task.write("\n")
    fp_task.write(" tasks:\n")
    fp_task.write(" - name: Make Dir\n")
    fp_task.write(" file:\n")
    fp_task.write(" path: " + to_dir + "\n")
    fp_task.write(" state: directory\n")
    fp_task.write("\n")
    fp_task.write(" - name: Copy File\n")
    fp_task.write(" copy:\n")
    fp_task.write(" src: " + tempdir + "/" + funcname + ".zip\n")
    fp_task.write(" dest: " + to_dir + "\n")
    fp_task.write("\n")
    if ('aws' in newvals):
        # Configure AWS credentials on the target host.
        if ('aws_access_key_id' in newvals['aws']):
            runcmd(
                fp_task, to_dir,
                'aws --profile default configure set aws_access_key_id ' +
                newvals['aws']['aws_access_key_id'])
        if ('aws_secret_access_key' in newvals['aws']):
            runcmd(
                fp_task, to_dir,
                'aws --profile default configure set aws_secret_access_key ' +
                newvals['aws']['aws_secret_access_key'])
    # Optional flags for update-function-code; empty string when unset.
    s3bucket = prefix_param("--s3-bucket",
                            newvals['lambda'].get('s3bucket', None))
    s3key = prefix_param("--s3-key", newvals['lambda'].get('s3key', None))
    s3objectversion = prefix_param(
        "--s3-object-version", newvals['lambda'].get('s3objectversion', None))
    publish = newvals['lambda'].get('publish', '--publish')
    revisionid = prefix_param("--revision-id",
                              newvals['lambda'].get('revisionid', None))
    funcname = prefix_param("--function-name", funcname)
    cmd = "aws lambda update-function-code " + funcname + " " + funcfile + " " + s3bucket + " " + s3key + " " + s3objectversion + " " + publish + " " + revisionid
    runcmd(fp_task, to_dir, cmd)
    fp_task.close()
    # if (os.path.exists(rspfile)):
    # os.remove(rspfile)
    print(tempdir)
    my_env = os.environ.copy()
    my_env['ANSIBLE_STDOUT_CALLBACK'] = 'yaml'
    # Run the generated playbook against the target host.
    pid = subprocess.Popen(
        'ansible-playbook runit.yml --ssh-common-args="-o StrictHostKeyChecking=no" '
        + localconnection + ' -i ' + newvals['server_hostname'] + ',',
        env=my_env,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    for line in pid.stdout.readlines():
        line = line.decode('utf-8').strip('\n')
        # Avoid echoing credential-bearing lines to the console.
        if ('ssh' not in line and 'pass' not in line):
            print(line)
    pid.wait()
    os.chdir('/tmp')
    if ('debug_ansible' in newvals):
        print(tempdir)
    else:
        shutil.rmtree(tempdir)
    exit(pid.returncode)
def main():
    """Main entry point <Chart Directory> <Override Values Toml>"""
    newvals = {}
    releasename = sys.argv[1]
    chart = sys.argv[2]
    if (not releasename or not chart):
        return 0
    # Helm values overrides live under override/<chart>/values.yaml.
    chartvalues = "override/" + sys.argv[2] + "/values.yaml"
    rspfile = sys.argv[3]
    # Normalise the release name to helm-friendly characters.
    releasename = releasename.replace(';', '-v').replace('_', '-').lower()
    if (os.path.exists(chartvalues)):
        # Seed newvals with any pre-existing override values.
        stream = open(chartvalues, 'r')
        values = yaml.load(stream)
        newvals.update(values)
        stream.close()
    print("RSP=" + rspfile)
    # Read the .rsp file of KEY=VALUE lines.
    lines = subprocess.run(
        ['cat', rspfile],
        stdout=subprocess.PIPE).stdout.decode('utf-8').split("\n")
    cleanvals = ""
    for line in lines:
        if ('=' in line):
            key = line.split('=')[0]
            # Keys containing '?' are dropped — presumably placeholders.
            if ('?' not in key):
                cleanvals = cleanvals + line + "\n"
    values = qtoml.loads(cleanvals)
    newvals.update(values)
    if (os.path.exists(chartvalues)):
        os.rename(chartvalues, chartvalues + ".bak")
    if not os.path.exists(os.path.dirname(chartvalues)):
        os.makedirs(os.path.dirname(chartvalues))
    # Persist the merged values as the chart's values.yaml.
    stream = open(chartvalues, "w")
    yaml.dump(newvals, stream)
    stream.close()
    # pprint(newvals)
    if ('gcloud' in newvals):
        # Configure gcloud and fetch GKE credentials when requested.
        if ('compute' in newvals['gcloud']
                and 'zone' in newvals['gcloud']['compute']):
            runcmd('gcloud config set compute/zone ' +
                   newvals['gcloud']['compute']['zone'])
        if ('container' in newvals['gcloud']
                and 'cluster' in newvals['gcloud']['container']):
            runcmd('gcloud config set container/cluster ' +
                   newvals['gcloud']['container']['cluster'])
        if ('core' in newvals['gcloud']
                and 'account' in newvals['gcloud']['core']):
            # Bug fix: the value lives under newvals['gcloud']['core'], not
            # newvals['core'] — the old lookup raised KeyError (the sibling
            # deploy script reads newvals['gcloud']['core']['account']).
            runcmd('gcloud config set core/account ' +
                   newvals['gcloud']['core']['account'])
        if ('core' in newvals['gcloud']
                and 'disable_usage_reporting' in newvals['gcloud']['core']):
            runcmd('gcloud config set core/disable_usage_reporting ' +
                   newvals['gcloud']['core']['disable_usage_reporting'])
        if ('core' in newvals['gcloud']
                and 'project' in newvals['gcloud']['core']):
            runcmd('gcloud config set core/project ' +
                   newvals['gcloud']['core']['project'])
        if ('oauth' in newvals['gcloud']
                and 'account' in newvals['gcloud']['oauth']):
            runcmd('gcloud auth activate-service-account ' +
                   newvals['gcloud']['oauth']['account'] + ' --key-file=' +
                   newvals['gcloud']['oauth']['keyfile'])
        if ('container' in newvals['gcloud']
                and 'cluster' in newvals['gcloud']['container']):
            runcmd('gcloud container clusters get-credentials ' +
                   newvals['gcloud']['container']['cluster'])
    runcmd('helm init --upgrade')
    runcmd('cat ' + chartvalues)
    if ('helmrepo' in newvals and 'url' in newvals['helmrepo']):
        # Fix: start with no login flags; the source showed `"******"` here,
        # which is redaction residue — an empty prefix is the only value
        # consistent with how mylogin is concatenated into `helm repo add`.
        mylogin = ""
        if ('username' in newvals['helmrepo']):
            mylogin = mylogin + "--username " + newvals['helmrepo'][
                'username'] + " "
        if ('password' in newvals['helmrepo']):
            mylogin = mylogin + "--password " + newvals['helmrepo'][
                'password'] + " "
        runcmd('helm repo add ' + mylogin + newvals['helmrepo']['name'] +
               " " + newvals['helmrepo']['url'])
        runcmd('helm repo update')
    version = 'latest'
    if ('version' in newvals):
        version = newvals['version']
    # runcmd's truthy return signals a failure worth retrying once.
    retry = runcmd('helm upgrade "' + releasename + '" "' + chart +
                   '" --version "' + version +
                   '" --install --force --debug -f ' + chartvalues)
    if (retry):
        print("Retrying helm upgrade")
        runcmd('helm upgrade "' + releasename + '" "' + chart +
               '" --version "' + version +
               '" --install --force --debug -f ' + chartvalues)
    os.remove(chartvalues)
    os.remove(rspfile)
import pytomlpp
import toml
import tomlkit
import qtoml
import timeit


def benchmark(name, func, number=5000):
    """Time *func* over *number* runs and print the elapsed seconds."""
    print(f'{name:>10}: Running...', end='', flush=True)
    res = str(timeit.timeit(func, number=number)).split('.')
    # The backspaces erase the "Running..." placeholder before the time.
    print('\b'*10 + f'{res[0]:>3}.{res[1]} s')


with open('data.toml', 'r', encoding='utf-8') as handle:
    test_data = handle.read()

# Benchmark every parser against the same document.
for parser_name, parse in [
        ('pytomlpp', lambda: pytomlpp.loads(test_data)),
        ('toml', lambda: toml.loads(test_data)),
        ('qtoml', lambda: qtoml.loads(test_data)),
        ('tomlkit', lambda: tomlkit.parse(test_data)),
]:
    benchmark(parser_name, parse)
def load(self) -> dict:
    """Parse the TOML file at ``self.path`` and return it as a dict."""
    return qtoml.loads(self.path.read_text())
def generate(self):
    """Run every benchmark scenario and write the report to reports/python.eno.

    Each scenario loads the same document in eno/toml/yaml form and times
    enolib (parse-only `[-]` and parse+query `[✓]`) against pyyaml, qtoml,
    ruamel.yaml, toml, and tomlkit via self.benchmark(label, version, fn,
    [iterations]); the trailing integer, where present, lowers the iteration
    count for slow parsers.
    """
    # Scenario: deeply nested document structure.
    self.scenario('abstract_hierarchy')
    with open('samples/abstract_hierarchy/hierarchy.eno') as file:
        eno_hierarchy = file.read()
    with open('samples/abstract_hierarchy/hierarchy.toml') as file:
        toml_hierarchy = file.read()
    with open('samples/abstract_hierarchy/hierarchy.yaml') as file:
        yaml_hierarchy = file.read()

    def eno_hierarchy_query():
        # Touch every field to exercise the query API, not just the parse.
        document = enolib.parse(eno_hierarchy)
        doc = document.section('doc')
        doc.list('colors').required_string_values()
        traits = doc.fieldset('traits')
        traits.entry('tired').required_string_value()
        traits.entry('extroverted').required_string_value()
        traits.entry('funny').required_string_value()
        traits.entry('inventive').required_string_value()
        doc.list('things').required_string_values()
        deep = doc.section('deep')
        deep.field('sea').required_string_value()
        deeper = deep.section('deep')
        deeper.field('sea').required_string_value()
        deepest = deeper.section('deep')
        deepest.field('sea').required_string_value()

    self.benchmark('[-] enolib', ENOLIB_VERSION,
                   lambda: enolib.parse(eno_hierarchy))
    self.benchmark('[✓] enolib', ENOLIB_VERSION, eno_hierarchy_query)
    self.benchmark(
        '[-] pyyaml (FullLoader)', PYYAML_VERSION,
        lambda: yaml.load(yaml_hierarchy, Loader=yaml.FullLoader), 10)
    self.benchmark('[-] pyyaml (CLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_hierarchy, Loader=yaml.CLoader))
    self.benchmark('[-] qtoml', QTOML_VERSION,
                   lambda: qtoml.loads(toml_hierarchy))
    self.benchmark('[-] ruamel.yaml', RUAMEL_YAML_VERSION,
                   lambda: ruamel.load(yaml_hierarchy))
    self.benchmark('[-] toml', TOML_VERSION,
                   lambda: toml.loads(toml_hierarchy))
    self.benchmark('[-] tomlkit', TOMLKIT_VERSION,
                   lambda: tomlkit.parse(toml_hierarchy), 10)

    # Scenario: a single huge text field.
    self.scenario('content_heavy')
    with open('samples/content_heavy/content.eno') as file:
        eno_content = file.read()
    with open('samples/content_heavy/content.toml') as file:
        toml_content = file.read()
    with open('samples/content_heavy/content.yaml') as file:
        yaml_content = file.read()

    def eno_content_query():
        enolib.parse(eno_content).field('content').required_string_value()

    self.benchmark('[-] enolib', ENOLIB_VERSION,
                   lambda: enolib.parse(eno_content))
    self.benchmark('[✓] enolib', ENOLIB_VERSION, eno_content_query)
    self.benchmark('[-] pyyaml (FullLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_content, Loader=yaml.FullLoader),
                   100)
    self.benchmark('[-] pyyaml (CLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_content, Loader=yaml.CLoader))
    self.benchmark('[-] qtoml', QTOML_VERSION,
                   lambda: qtoml.loads(toml_content), 100)
    self.benchmark('[-] ruamel.yaml', RUAMEL_YAML_VERSION,
                   lambda: ruamel.load(yaml_content))
    self.benchmark('[-] toml', TOML_VERSION,
                   lambda: toml.loads(toml_content), 10)
    self.benchmark('[-] tomlkit', TOMLKIT_VERSION,
                   lambda: tomlkit.parse(toml_content), 100)

    # Scenario: repeated configuration sections.
    self.scenario('invented_server_configuration')
    with open('samples/invented_server_configuration/configuration.eno'
              ) as file:
        eno_configuration = file.read()
    with open('samples/invented_server_configuration/configuration.toml'
              ) as file:
        toml_configuration = file.read()
    with open('samples/invented_server_configuration/configuration.yaml'
              ) as file:
        yaml_configuration = file.read()

    def eno_configuration_query():
        document = enolib.parse(eno_configuration)
        for environment in document.sections():
            for server in environment.sections():
                conf = server.fieldset('conf')
                conf.entry('ruby').required_boolean_value()
                conf.entry('python').required_boolean_value()
                server.field('clean').required_boolean_value()
                server.list('steps').required_string_values()

    self.benchmark('[-] enolib', ENOLIB_VERSION,
                   lambda: enolib.parse(eno_configuration))
    self.benchmark('[✓] enolib', ENOLIB_VERSION, eno_configuration_query)
    self.benchmark(
        '[-] pyyaml (FullLoader)', PYYAML_VERSION,
        lambda: yaml.load(yaml_configuration, Loader=yaml.FullLoader), 10)
    self.benchmark(
        '[-] pyyaml (CLoader)', PYYAML_VERSION,
        lambda: yaml.load(yaml_configuration, Loader=yaml.CLoader))
    self.benchmark('[-] qtoml', QTOML_VERSION,
                   lambda: qtoml.loads(toml_configuration))
    self.benchmark('[-] ruamel.yaml', RUAMEL_YAML_VERSION,
                   lambda: ruamel.load(yaml_configuration))
    self.benchmark('[-] toml', TOML_VERSION,
                   lambda: toml.loads(toml_configuration))
    self.benchmark('[-] tomlkit', TOMLKIT_VERSION,
                   lambda: tomlkit.parse(toml_configuration), 10)

    # Scenario: typical blog-post front matter.
    self.scenario('jekyll_post_example')
    with open('samples/jekyll_post_example/post.eno') as file:
        eno_post = file.read()
    with open('samples/jekyll_post_example/post.toml') as file:
        toml_post = file.read()
    with open('samples/jekyll_post_example/post.yaml') as file:
        yaml_post = file.read()

    def eno_post_query():
        document = enolib.parse(eno_post)
        document.field('layout').required_string_value()
        document.field('title').required_string_value()
        document.field('date').required_datetime_value()
        document.field('categories').required_string_value()
        document.field('markdown').required_string_value()

    self.benchmark('[-] enolib', ENOLIB_VERSION,
                   lambda: enolib.parse(eno_post))
    self.benchmark('[✓] enolib', ENOLIB_VERSION, eno_post_query)
    self.benchmark('[-] pyyaml (FullLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_post, Loader=yaml.FullLoader), 10)
    self.benchmark('[-] pyyaml (CLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_post, Loader=yaml.CLoader))
    self.benchmark('[-] qtoml', QTOML_VERSION,
                   lambda: qtoml.loads(toml_post))
    self.benchmark('[-] ruamel.yaml', RUAMEL_YAML_VERSION,
                   lambda: ruamel.load(yaml_post))
    self.benchmark('[-] toml', TOML_VERSION,
                   lambda: toml.loads(toml_post))
    self.benchmark('[-] tomlkit', TOMLKIT_VERSION,
                   lambda: tomlkit.parse(toml_post), 10)

    # Scenario: structured route data with optional subsections.
    self.scenario('journey_route_data')
    with open('samples/journey_route_data/journey.eno') as file:
        eno_journey = file.read()
    with open('samples/journey_route_data/journey.toml') as file:
        toml_journey = file.read()
    with open('samples/journey_route_data/journey.yaml') as file:
        yaml_journey = file.read()

    def eno_journey_query():
        document = enolib.parse(eno_journey)
        document.field('title').required_string_value()
        document.field('date').required_date_value()
        document.field('time').required_string_value()
        document.field('abstract').required_string_value()
        for checkpoint in document.sections('checkpoint'):
            checkpoint.field('coordinates').required_string_value()
            checkpoint.field('hint').optional_string_value()
            checkpoint.field('special').optional_string_value()
            checkpoint.field('location').required_string_value()
            safezone = checkpoint.optional_section('safezone')
            if safezone:
                safezone.field('shape').required_string_value()
                safezone.field('center').required_string_value()
                safezone.field('radius').required_integer_value()

    self.benchmark('[-] enolib', ENOLIB_VERSION,
                   lambda: enolib.parse(eno_journey))
    self.benchmark('[✓] enolib', ENOLIB_VERSION, eno_journey_query)
    self.benchmark('[-] pyyaml (FullLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_journey, Loader=yaml.FullLoader),
                   10)
    self.benchmark('[-] pyyaml (CLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_journey, Loader=yaml.CLoader))
    self.benchmark('[-] qtoml', QTOML_VERSION,
                   lambda: qtoml.loads(toml_journey), 10)
    self.benchmark('[-] ruamel.yaml', RUAMEL_YAML_VERSION,
                   lambda: ruamel.load(yaml_journey), 10)
    self.benchmark('[-] toml', TOML_VERSION,
                   lambda: toml.loads(toml_journey))
    self.benchmark('[-] tomlkit', TOMLKIT_VERSION,
                   lambda: tomlkit.parse(toml_journey), 10)

    # Scenario: the classic YAML invoice example.
    self.scenario('yaml_invoice_example')
    with open('samples/yaml_invoice_example/invoice.eno') as file:
        eno_invoice = file.read()
    with open('samples/yaml_invoice_example/invoice.toml') as file:
        toml_invoice = file.read()
    with open('samples/yaml_invoice_example/invoice.yaml') as file:
        yaml_invoice = file.read()

    def eno_invoice_query():
        document = enolib.parse(eno_invoice)
        document.field('invoice').required_integer_value()
        document.field('date').required_date_value()
        document.field('tax').required_float_value()
        document.field('total').required_float_value()
        document.field('comments').required_string_value()
        # NOTE: the loop variable shadows the builtin `type` here.
        for type in ['bill-to', 'ship-to']:
            contact = document.section(type)
            contact.field('given').required_string_value()
            contact.field('family').required_string_value()
            address = contact.section('address')
            address.field('lines').required_string_value()
            address.field('city').required_string_value()
            address.field('state').required_string_value()
            address.field('postal').required_string_value()
        for product in document.sections('product'):
            product.field('sku').required_string_value()
            product.field('quantity').required_integer_value()
            product.field('description').required_string_value()
            product.field('price').required_string_value()

    self.benchmark('[-] enolib', ENOLIB_VERSION,
                   lambda: enolib.parse(eno_invoice))
    self.benchmark('[✓] enolib', ENOLIB_VERSION, eno_invoice_query)
    self.benchmark('[-] pyyaml (FullLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_invoice, Loader=yaml.FullLoader),
                   10)
    self.benchmark('[-] pyyaml (CLoader)', PYYAML_VERSION,
                   lambda: yaml.load(yaml_invoice, Loader=yaml.CLoader))
    self.benchmark('[-] qtoml', QTOML_VERSION,
                   lambda: qtoml.loads(toml_invoice), 10)
    self.benchmark('[-] ruamel.yaml', RUAMEL_YAML_VERSION,
                   lambda: ruamel.load(yaml_invoice))
    self.benchmark('[-] toml', TOML_VERSION,
                   lambda: toml.loads(toml_invoice))
    self.benchmark('[-] tomlkit', TOMLKIT_VERSION,
                   lambda: tomlkit.parse(toml_invoice), 10)

    # Persist the accumulated report.
    with open('reports/python.eno', 'w') as file:
        file.write(self.report)