def submit_job():
    job_args = copy.deepcopy(request.get_json())
    logger.info("Received job to submit: {}".format(job_args["job_uid"]))
    job_uid = job_args["job_uid"]
    foreground = job_args["foreground"]
    logger.info(f"Foreground: {foreground}")
    provider = job_args["provider"]
    monkeyfs_path = get_local_filesystem_for_provider(provider)
    job_folder_path = os.path.join(MONKEYFS_LOCAL_PATH, "jobs", job_uid)
    provider_job_folder_path = os.path.join(monkeyfs_path, "jobs", job_uid)
    with open(os.path.join(job_folder_path, "job.yaml"), "w") as f:
        y = YAML()
        y.explicit_start = True
        y.default_flow_style = False
        y.dump(job_args, f)
    with open(os.path.join(provider_job_folder_path, "job.yaml"), "w") as f:
        y = YAML()
        y.explicit_start = True
        y.default_flow_style = False
        y.dump(job_args, f)
    success, msg = monkey.submit_job(job_args, foreground=foreground)
    res = {"msg": msg, "success": success}
    logger.info("Finished submitting job")
    return jsonify(res)
def _to_yaml(
    obj,
    filename: Union[str, PathLike] = None,
    default_flow_style: bool = False,
    encoding: str = "utf-8",
    errors: str = "strict",
    ruamel_typ: str = "rt",
    ruamel_attrs: Optional[Dict] = None,
    **yaml_kwargs,
):
    if not ruamel_attrs:
        ruamel_attrs = {}
    if filename:
        _exists(filename, create=True)
        with open(filename, "w", encoding=encoding, errors=errors) as f:
            if ruamel_available:
                yaml_dumper = YAML(typ=ruamel_typ)
                yaml_dumper.default_flow_style = default_flow_style
                for attr, value in ruamel_attrs.items():
                    setattr(yaml_dumper, attr, value)
                return yaml_dumper.dump(obj, stream=f, **yaml_kwargs)
            elif pyyaml_available:
                return yaml.dump(obj, stream=f, default_flow_style=default_flow_style, **yaml_kwargs)
            else:
                raise BoxError("No YAML Parser available, please install ruamel.yaml>0.17 or PyYAML")
    else:
        if ruamel_available:
            yaml_dumper = YAML(typ=ruamel_typ)
            yaml_dumper.default_flow_style = default_flow_style
            for attr, value in ruamel_attrs.items():
                setattr(yaml_dumper, attr, value)
            with StringIO() as string_stream:
                yaml_dumper.dump(obj, stream=string_stream, **yaml_kwargs)
                return string_stream.getvalue()
        elif pyyaml_available:
            return yaml.dump(obj, default_flow_style=default_flow_style, **yaml_kwargs)
        else:
            raise BoxError("No YAML Parser available, please install ruamel.yaml>0.17 or PyYAML")
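# Hedged usage sketch for _to_yaml() above (assumes the module-level flags
# ruamel_available/pyyaml_available are set elsewhere in the module and that
# ruamel.yaml is installed; "box_out.yaml" is an illustrative filename):
text = _to_yaml({"a": 1, "b": [1, 2]})  # no filename -> the YAML text is returned
# a: 1
# b:
# - 1
# - 2
_to_yaml({"a": 1}, filename="box_out.yaml")  # writes the file instead, returns None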
def testToYAMLs(self, tests, rootFolder='./testsNewBuild/'):
    """Writes a batch of tests to file in the yaml format, grouping them by team and name

    :param tests: list of tests to write to file
    :type tests: list
    :param rootFolder: destination folder, defaults to './testsNewBuild/'
    :type rootFolder: str, optional
    """
    # extract unique test names
    uniqueTestNames = set([c.name for c in tests])
    # group by test names to put them in same files
    for name in uniqueTestNames:
        yaml = YAML()
        yaml.default_flow_style = False
        testDict = None
        for t in tests:
            if t.name == name:
                f = open(os.path.join(rootFolder, t.team, name + '.yaml'), "w+")
                if testDict is None:
                    testDict = t.toDict()
                else:
                    key = 'metric' + str(len(testDict['metrics']) + 1)
                    testDict['metrics'][key] = t.toDict()['metrics']['metric1']
        # dump the merged dict once per test name, into the file opened above
        yaml.dump(testDict, f)
def save_libraries_file(self):
    yaml = YAML(typ='safe', pure=True)
    yaml.default_flow_style = False
    yaml_file = self.config / "libraries.yaml"
    data = []
    for library in self.libraries:
        l = {}
        l['name'] = library.name
        l['lib_type'] = library.lib_type
        libname = library.search_name
        l['search_name'] = libname[1]
        l['url'] = library.url
        l['version'] = library.version
        req_libs = []
        for r in library.required_libraries():
            req_lib = {}
            req_lib['name'] = r.name
            req_lib['version'] = r.version
            # append instead of reassigning, which kept only the last entry
            req_libs.append(req_lib)
        l['required_libraries'] = req_libs
        opt_libs = []
        for o in library.optional_libraries():
            opt_lib = {}
            opt_lib['name'] = o.name
            opt_lib['version'] = o.version
            opt_libs.append(opt_lib)
        l['optional_libraries'] = opt_libs
        data.append(l)
    # YAML.dump takes (data, stream); the stray `self` argument was a bug
    yaml.dump(data, yaml_file)
def load_libraries_file(self):
    yaml = YAML(typ='safe', pure=True)
    yaml.default_flow_style = False
    yaml_file = self.config / "libraries.yaml"
    data = yaml.load(yaml_file)
    self.libraries = {}
    libraries = data.get('libraries', {})
    if libraries:
        for lib in libraries:
            library = Library()
            library.name = lib.get('name')
            library.url = lib.get('url')
            library.lib_type = LibraryType[lib.get('type')]
            library.search_name = lib.get('search_name')
            rl = lib.get('required_libraries')
            if rl:
                for r in rl:
                    library.add_required_library(r['name'], r['version'])
            ol = lib.get('optional_libraries')
            if ol:
                for o in ol:
                    # guard against rl being None; this also makes certain that the
                    # library cannot be both optional and required. Required overrides
                    # optional, so the optional entry will be lost at the next
                    # save_libraries_file.
                    if rl and o in rl:
                        continue
                    library.add_optional_library(o['name'], o['version'], o['notes'])
            self.libraries[library.name] = library
            self.set_library_tbl_values(library)
def export(self, stream, typ='yml'):
    """Freeze environment"""
    log.warning("Exporting environment '%s'", self.name)
    if typ == 'yml':
        res = subprocess.run(
            ["conda", "env", "export", "-p", self.path],
            stdout=subprocess.PIPE)
        yaml = YAML(typ='rt')
        yaml.default_flow_style = False
        env = yaml.load(res.stdout)
        env['name'] = self.name
        del env['prefix']
        yaml.dump(env, stream)
    elif typ == 'txt':
        res = subprocess.run(
            ["conda", "list", "--explicit", "--md5", "-p", self.path],
            stdout=stream)
    return res.returncode
def force_yaml_channels(yamlfile, channels):
    """Replace the `channels:` block with `channels`

    :param yamlfile: str: path to yaml file
    :param channels: list: channel URLs
    """
    if not isinstance(channels, list):
        raise TypeError("Expecting a list of URLs")

    yaml = YAML()
    yaml.default_flow_style = False
    yaml.indent(offset=2)

    with open(yamlfile) as yaml_data:
        result = yaml.load(yaml_data)

    if not result.get('channels'):
        print(f"{yamlfile} has no channels", file=sys.stderr)
        return

    # Assuming there's a reason to change the file...
    if result['channels'] != channels:
        result['channels'] = channels
        with open(yamlfile, 'w') as fp:
            yaml.dump(result, fp)
def to_yml(self):
    """Save the current config"""
    from ruamel.yaml import YAML
    yaml = YAML()
    yaml.default_flow_style = False
    with open(self._path, 'w') as file:
        yaml.dump(self.__dict__, file)
def main():
    chdir2cwd(__file__)
    ym = YAML(typ='safe')
    ym.default_flow_style = None
    filepath = os.path.join(basepath, filename)
    with open(filepath, 'r', encoding="utf-8") as fl:
        data = ym.load(fl)
    keys = uniq(flatten([list(d) for d in data]))
    print(keys)
    rst = {}
    for k in keys:
        q = flatten([d.get(k, []) for d in data])
        q_cnt = Counter(q)
        q_items = q_cnt.most_common()
        q_gt1 = [t for t, n in q_items if n > 1]
        rst[k] = sorted(q_gt1, reverse=True)
    with open("duplicated_claus.yml", 'w', encoding="utf-8") as fl:
        # ym.dump(sim_report_group, fl, transform=tr)
        ym.dump(rst, fl)
def _watcher(osde2ectl_cmd, account_config, my_path, cluster_count, delay, my_uuid):
    logging.info('Watcher thread started')
    logging.info('Getting status every %d seconds' % int(delay))
    yaml = YAML(pure=True)
    yaml.default_flow_style = False
    yaml.explicit_start = False
    yaml.explicit_end = False
    yaml.allow_duplicate_keys = True
    # Use context managers so the dump is flushed to disk before the immediate re-load
    with open(my_path + "/account_config.yaml", 'w') as f:
        yaml.dump(account_config, f)
    with open(my_path + "/account_config.yaml") as f:
        my_config = yaml.load(f)
    my_thread = threading.currentThread()
    cmd = [osde2ectl_cmd, "list", "--custom-config", "account_config.yaml"]
    # To stop the watcher we expect the run attribute to be not True
    while getattr(my_thread, "run", True):
        logging.debug(cmd)
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                   cwd=my_path, universal_newlines=True)
        stdout, stderr = process.communicate()
        cluster_count = 0
        state = {}
        status = {}
        error = []
        # Count the various states/statuses and report them to logging
        for line in stdout.splitlines():
            if my_config['ocm']['userOverride'] in line:
                cluster_count += 1
                state_key = line.split()[2]
                status_key = line.split()[3]
                state[state_key] = state.get(state_key, 0) + 1
                status[status_key] = status.get(status_key, 0) + 1
                if state_key == "error":
                    error.append(line.split()[1])
                    logging.debug(line.split()[1])
        logging.info('Requested Clusters for test %s: %d' % (my_uuid, cluster_count))
        if cluster_count != 0:
            logging.debug(state.items())
            logging.debug(status.items())
            state_output = "Current clusters state: " + str(cluster_count) + " clusters"
            status_output = "Current clusters status: " + str(cluster_count) + " clusters"
            for i1 in state.items():
                state_output += " (" + str(i1[0]) + ": " + str(i1[1]) + ")"
            for i2 in status.items():
                status_output += " (" + str(i2[0]) + ": " + str(i2[1]) + ")"
            logging.info(state_output)
            logging.info(status_output)
        if error:
            logging.warning('Clusters in error state: %s' % error)
        time.sleep(delay)
    logging.info('Watcher exiting')
def write_yaml(cp: Codeplug, f) -> None:
    yaml = YAML()
    yaml.default_flow_style = None
    yaml.indent(None, 4, 2)
    register_yaml(yaml)
    dd = cp.as_dict()
    yaml.dump(dd, f)
def test_copysegmentation_dvid_to_zarr(setup_dvid_to_zarr):
    template_dir, config, volume, dvid_address, repo_uuid, output_file = setup_dvid_to_zarr

    # Modify the config from above to compute pyramid scales,
    # and choose a bounding box that is aligned with the bricks even at scale 2
    # (just for easier testing).
    box_zyx = [[0, 0, 0], [256, 256, 256]]
    config["input"]["geometry"]["bounding-box"] = box_zyx
    config["copysegmentation"]["pyramid-depth"] = 2

    yaml = YAML()
    yaml.default_flow_style = False
    with open(f"{template_dir}/workflow.yaml", 'w') as f:
        yaml.dump(config, f)

    execution_dir, _workflow = launch_flow(template_dir, 1)

    box_zyx = np.array(box_zyx)
    scale_0_vol = volume[box_to_slicing(*box_zyx)]
    scale_1_vol = downsample_labels(scale_0_vol, 2, True)
    scale_2_vol = downsample_labels(scale_1_vol, 2, True)

    store = zarr.NestedDirectoryStore(f"{execution_dir}/{output_file}")
    f = zarr.open(store, 'r')
    output_0_vol = f['s0'][box_to_slicing(*(box_zyx // 1))]
    output_1_vol = f['s1'][box_to_slicing(*(box_zyx // 2))]
    output_2_vol = f['s2'][box_to_slicing(*(box_zyx // 4))]

    assert (output_0_vol == scale_0_vol).all(), \
        "Scale 0: Written vol does not match expected"
    assert (output_1_vol == scale_1_vol).all(), \
        "Scale 1: Written vol does not match expected"
    assert (output_2_vol == scale_2_vol).all(), \
        "Scale 2: Written vol does not match expected"
def _run_to_dvid(setup, check_scale_0=True):
    template_dir, config, volume, dvid_address, repo_uuid, output_segmentation_name = setup

    yaml = YAML()
    yaml.default_flow_style = False

    # re-dump config in case it's been changed by a specific test
    with open(f"{template_dir}/workflow.yaml", 'w') as f:
        yaml.dump(config, f)

    _execution_dir, workflow = launch_flow(template_dir, 1)
    final_config = workflow.config

    input_box_xyz = np.array(final_config['input']['geometry']['bounding-box'])
    input_box_zyx = input_box_xyz[:, ::-1]
    expected_vol = extract_subvol(volume, input_box_zyx)

    output_box_xyz = np.array(final_config['output']['geometry']['bounding-box'])
    output_box_zyx = output_box_xyz[:, ::-1]
    output_vol = fetch_raw(dvid_address, repo_uuid, output_segmentation_name,
                           output_box_zyx, dtype=np.uint64)

    np.save('/tmp/output_vol.npy', output_vol)
    np.save('/tmp/expected_vol.npy', expected_vol)

    if check_scale_0:
        assert (output_vol == expected_vol).all(), \
            "Written vol does not match expected"

    return input_box_zyx, expected_vol, output_vol
def dump(obj: dict, default_flow_style=False) -> str:
    yaml = YAML()
    yaml.default_flow_style = default_flow_style
    yaml.preserve_quotes = True
    stream = StringIO()
    yaml.dump(obj, stream)
    return stream.getvalue()
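# A minimal usage sketch for dump() above (sample data is hypothetical, not
# from the original source). preserve_quotes only matters for round-tripped
# documents, so plain dicts are emitted with ruamel's default quoting.
print(dump({"name": "demo", "tags": ["a", "b"]}))
# name: demo
# tags:
# - a
# - b
print(dump({"point": {"x": 1, "y": 2}}, default_flow_style=True))
# {point: {x: 1, y: 2}}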
def write_yaml():
    add_constructor(
        resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        lambda loader, node: OrderedDict(loader.construct_pairs(node)))
    yaml = YAML()
    yaml.default_flow_style = False
    i = 1
    if os.path.isfile("fruits.yaml"):
        os.remove("fruits.yaml")
    loaded_array = np.load(
        '/home/oza/pre-experiment/speeding/distiller/distiller/apputils/simple_gene.npz')
    with open("fruits.yaml", "a") as yf:
        yaml.dump(
            {
                "version": i,
                "pruners": {
                    "conv1_pruner": {
                        "class": 'AutomatedGradualPruner',
                        'initial_sparsity': float(loaded_array['array_1']),
                        'final_sparsity': 0.3,
                        'weights': '[module.first.conv.weight]'
                    }
                },
                "fruit_2": {
                    "name": "orange",
                    "price": "200"
                }
            }, yf)
def to_yaml(self, filepath: typing.Union[str, pathlib.Path]) -> None:
    """Write to a YAML file."""
    spec = self.to_spec()
    yaml = YAML()
    yaml.default_flow_style = False
    with open(filepath, "w") as fd:
        yaml.dump(spec, fd)
def worker(config, bucket_name, prefix, suffix, func, unsafe, sources_policy, queue):
    dc = datacube.Datacube(config=config)
    index = dc.index
    s3 = boto3.resource("s3")
    safety = 'safe' if not unsafe else 'unsafe'
    while True:
        try:
            key = queue.get(timeout=60)
            if key == GUARDIAN:
                break
            logging.info("Processing %s %s", key, current_process())
            obj = s3.Object(bucket_name, key).get(ResponseCacheControl='no-cache')
            raw = obj['Body'].read()
            if suffix == AWS_PDS_TXT_SUFFIX:
                # Attempt to process text document
                raw_string = raw.decode('utf8')
                txt_doc = _parse_group(iter(raw_string.split("\n")))['L1_METADATA_FILE']
                data = make_metadata_doc(txt_doc, bucket_name, key)
            else:
                yaml = YAML(typ=safety, pure=False)
                yaml.default_flow_style = False
                data = yaml.load(raw)
            uri = get_s3_url(bucket_name, key)
            logging.info("calling %s", func)
            func(data, uri, index, sources_policy)
            queue.task_done()
        except Empty:
            break
        except EOFError:
            break
def write_to_dir(self, dir_path: str) -> None:
    hosts = {"all": self.root.dump()}
    path = Path(dir_path) / "hosts.yaml"
    yaml = YAML()
    yaml.default_flow_style = False
    with open(path, 'w') as f:
        yaml.dump(hosts, f)
def make_config(database_dir, threads, assembler, data_type='metagenome',
                interleaved_fastq=False, config='config.yaml'):
    """
    Reads the template config file (with comments) from ./template_config.yaml
    and updates it with the parameters provided.

    Args:
        config (str): output file path for yaml
        database_dir (str): location of downloaded databases
        threads (int): number of threads per node to utilize
        assembler (str): either spades or megahit
        data_type (str): either metagenome or metatranscriptome
    """
    from ruamel.yaml import YAML  # used for yaml reading with comments

    yaml = YAML()
    yaml.version = (1, 1)
    yaml.default_flow_style = False

    template_conf_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "template_config.yaml")

    with open(template_conf_file) as template_config:
        conf = yaml.load(template_config)

    conf["tmpdir"] = tempfile.gettempdir()
    conf["threads"] = multiprocessing.cpu_count() if not threads else threads
    conf["preprocess_adapters"] = os.path.join(database_dir, "adapters.fa")
    conf["contaminant_references"] = {
        "PhiX": os.path.join(database_dir, "phiX174_virus.fa")
    }
    if data_type == 'metatranscriptome':
        # no trailing comma here: the original one turned this value into a tuple
        conf["contaminant_references"]["rRNA"] = os.path.join(
            database_dir, "silva_rfam_all_rRNAs.fa")
    conf["data_type"] = data_type
    conf["interleaved_fastqs"] = interleaved_fastq
    conf["assembler"] = assembler
    conf["database_dir"] = database_dir
    # conf["refseq_namemap"] = os.path.join(database_dir, "refseq.db")
    # conf["refseq_tree"] = os.path.join(database_dir, "refseq.tree")
    # conf["diamond_db"] = os.path.join(database_dir, "refseq.dmnd")

    if os.path.exists(config):
        logging.warning(
            f"Config file {config} already exists; refusing to overwrite it. Continuing...")
    else:
        with open(config, "w") as f:
            yaml.dump(conf, f)
        logging.info("Configuration file written to %s\n"
                     "You may want to edit it using any text editor." % config)
def serialize(self, out):
    def nested_set(target, path, value):
        value = self.serialize_value(value)
        if len(path) > 1:
            if len(path) >= 2 and path[1] and path[1][0] == '[' and path[1][-1] == ']' and path[1][1:-1].isdigit():
                if path[0] not in target:
                    target[path[0]] = []
                if len(path) > 2:
                    new_value = CommentedMap()
                    nested_set(new_value, path[2:], value)
                    target[path[0]].append(new_value)
                else:
                    target[path[0]].append(value)
            else:
                # Add an empty dict in case there is a value and we expect a dict
                if path[0] not in target or not isinstance(target[path[0]], dict):
                    target[path[0]] = CommentedMap()
                nested_set(target[path[0]], path[1:], value)
        else:
            target[path[0]] = value

    units = CommentedMap()
    for unit in self.unit_iter():
        nested_set(units, unit.getid().split('->'), unit.target)
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.dump(self.get_root_node(units), out)
def dict_to_yaml(data, width=None, sort=False):
    """ Convert dictionary into yaml """
    output = StringIO()

    # Set formatting options
    yaml = YAML()
    yaml.indent(mapping=4, sequence=4, offset=2)
    yaml.default_flow_style = False
    yaml.allow_unicode = True
    yaml.encoding = 'utf-8'
    yaml.width = width

    # Make sure that multiline strings keep the formatting
    data = copy.deepcopy(data)
    scalarstring.walk_tree(data)

    # Sort the data https://stackoverflow.com/a/40227545
    if sort:
        sorted_data = CommentedMap()
        for key in sorted(data):
            sorted_data[key] = data[key]
        data = sorted_data

    yaml.dump(data, output)
    return output.getvalue()
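# Hedged example for dict_to_yaml() above (sample data is illustrative only):
# scalarstring.walk_tree() turns embedded multiline strings into literal block
# scalars, so the newlines survive in the dumped output.
sample = {"script": "echo one\necho two\n", "timeout": 60}
print(dict_to_yaml(sample))
# Emits roughly (indentation follows the yaml.indent settings above):
# script: |
#     echo one
#     echo two
# timeout: 60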
def generate_new_docker_compose(ctx, branch, asset, image):
    """ TODO """
    filename = 'docker-compose-assets-' + branch + '.yml'
    dirname = "./"
    click.echo('Writing new docker compose file into: {}'.format(filename))
    yaml = YAML(typ='safe', pure=True)
    yaml.default_flow_style = True
    dic = {}

    def filtered_yaml():
        coins = branch + '_coins_assets'
        for assetchain_key in ctx.assetchains[coins].split(', '):
            x = ctx.new_config_data['assetchains'][assetchain_key]
            dic[assetchain_key] = x
            # filtering by asset is not implemented yet; all branches fall through
            if asset and asset == assetchain_key:
                pass
            elif asset:
                pass
            else:
                pass
        return dic

    template = env.get_template('docker-compose-new-template.conf.j2')
    templatized_config = template.render(items=filtered_yaml(),
                                         seed_ip=ctx.seed_ip2,
                                         mined=ctx.mined_coins,
                                         btcpubkey=ctx.btcpubkey,
                                         image_name=image)
    ctx.write_config(dirname, filename=filename, templatized_config=templatized_config)
def add_dict_in_yaml(in_dct, out_f, encoding='utf-8', flow_style=False):
    """
    Appends a dictionary to a YAML file.

    If the YAML file does not exist, it is created.
    If it exists, the dictionary is appended to the end of the file.

    :param in_dct: dictionary to append
    :param out_f: YAML file
    :param encoding: encoding of the YAML file
    :param flow_style: True or False
    """
    yaml = YAML(pure=True)
    yaml.default_flow_style = flow_style
    sep = ''
    if Path(out_f).exists():
        mode = 'a'
        if Path(out_f).stat().st_size:
            sep = '---\n'
    else:
        mode = 'w'
        Path(out_f).parent.mkdir(parents=True, exist_ok=True)
    with open(Path(out_f), mode, encoding=encoding) as f_out:
        if sep:
            f_out.write(sep)
        yaml.dump(in_dct, f_out)
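# Sketch of how add_dict_in_yaml() behaves ("runs.yaml" is an example path):
# successive calls append '---'-separated documents, which can be read back
# with load_all() on a ruamel YAML instance.
add_dict_in_yaml({"run": 1}, "runs.yaml")
add_dict_in_yaml({"run": 2}, "runs.yaml")
with open("runs.yaml", encoding="utf-8") as f:
    docs = list(YAML(typ="safe").load_all(f))
# docs == [{'run': 1}, {'run': 2}]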
def write_permuted_configs_to_file(self, root_path):
    """Write all permuted configurations to files.

    All config files are named after their ID. They are placed in the configs
    directory of the root directory to be created. In addition, an info.txt is
    created that shows the used permutation for each file.

    Arguments:
        root_path {str}: Name of the target root directory
    """
    # Create directories
    if not os.path.exists(root_path) or not os.path.exists(root_path + "configs/"):
        os.makedirs(root_path + "configs/")

    # Write config files
    yaml = YAML()
    yaml.default_flow_style = False
    for i, item in enumerate(self._final_configs):
        config, permutation = item
        # Add the permutation to the config to easily keep track of it
        config["permutation"] = permutation
        # Write config to file; mode "x" raises if the file already exists
        with open(root_path + "configs/" + str(i) + ".yaml", "x") as f:
            yaml.dump(config, f)
        # Create/append info.txt to store the config's ID along with its used permutation
        with open(root_path + "info.txt", "a") as f:
            f.write(str(i) + ": " + str(permutation) + "\n\n")
def list_products_yaml(self, hostname, system):
    from ruamel.yaml import YAML

    yml = YAML(typ='safe', pure=False)
    yml.default_flow_style = False
    yml.explicit_end = True
    yml.explicit_start = True
    yml.indent(mapping=4, sequence=4, offset=2)
    data = system.to_refhost_dict()
    data["name"] = str(hostname)
    yml.dump(data, self.output)
def serialize(self, out):
    def nested_set(target, path, value):
        value = self.serialize_value(value)
        if len(path) > 1:
            if len(path) == 2 and path[1] and path[1][0] == '[' and path[1][-1] == ']' and path[1][1:-1].isdigit():
                if path[0] not in target:
                    target[path[0]] = []
                target[path[0]].append(value)
            else:
                # Add an empty dict in case there is a value and we expect a dict
                if path[0] not in target or not isinstance(target[path[0]], dict):
                    target[path[0]] = CommentedMap()
                nested_set(target[path[0]], path[1:], value)
        else:
            target[path[0]] = value

    units = CommentedMap()
    for unit in self.unit_iter():
        nested_set(units, unit.getid().split('->'), unit.target)
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.dump(self.get_root_node(units), out)
import datetime
import re
import os

from ruamel.yaml import YAML

yaml = YAML(typ='safe')
yaml.default_flow_style = False
yaml.representer.ignore_aliases = lambda x: True


def iter_files():
    outdir = 'engagements'
    for filename in os.listdir(outdir):
        fn = os.path.join(outdir, filename)
        state, ext = os.path.splitext(fn)
        state = state.split("/")[1]
        if ext == '.yml':
            with open(fn, 'r') as fp:
                data = yaml.load(fp)
            yield (state, data)


from arpeggio import Optional, ZeroOrMore, OneOrMore, EOF, UnorderedGroup, OrderedChoice, Sequence
from arpeggio import RegExMatch as _
from arpeggio import ParserPython


def ordinal():
    return _(r'\d+(st|nd|rd|th)')


def punct():
    return _(r"[.,;:]")
def yaml() -> YAML:
    """Return default YAML parser."""
    yamlp = YAML(typ='safe', pure=True)
    yamlp.preserve_quotes = True
    yamlp.default_flow_style = False
    return yamlp
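# Possible call site for the yaml() factory above (assumed, not from the
# source): each call returns a fresh parser, so option tweaks made by one
# caller never leak into another.
def load_config(path: str) -> dict:
    parser = yaml()
    with open(path, encoding="utf-8") as f:
        return parser.load(f)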