def load_config(cls: Type[AnyExecutor], source: Union[str, TextIO], separated_workspace: bool = False, pea_id: int = 0) -> AnyExecutor:
    """Build an executor from a YAML file.

    The YAML is read twice: a first "safe" pass with all ``!`` class tags masked
    (so no executor class is instantiated yet), used to discover and import any
    ``metas.py_modules``; then the tags are restored and the YAML is loaded for
    real to construct the executor.

    :param source: the file path of the YAML file or a ``TextIO`` stream to be loaded from
    :param separated_workspace: the dump and data files associated to this executor will be stored separately for
        each parallel pea, which will be indexed by the ``pea_id``
    :param pea_id: the id of the storage of this parallel pea, only effective when ``separated_workspace=True``
    :return: an executor object
    :raises FileNotFoundError: if ``source`` is empty/falsy
    :raises EmptyExecutorYAML: if the YAML content parses to nothing
    :raises TypeError: if ``metas.py_modules`` is neither a str nor a list
    """
    if not source:
        raise FileNotFoundError
    source = get_local_config_source(source)
    # first scan, find if external modules are specified
    with (open(source, encoding='utf8') if isinstance(source, str) else source) as fp:
        # ignore all lines start with ! because they could trigger the deserialization of that class;
        # the tag is swapped for the sentinel key '__tag: ' and restored after this safe pass
        safe_yml = '\n'.join(v if not re.match(r'^[\s-]*?!\b', v) else v.replace('!', '__tag: ') for v in fp)
    tmp = yaml.load(safe_yml)
    if tmp:
        if 'metas' not in tmp:
            tmp['metas'] = {}
        tmp = fill_metas_with_defaults(tmp)
        # import user-specified modules before the real (tagged) load below
        if 'py_modules' in tmp['metas'] and tmp['metas']['py_modules']:
            mod = tmp['metas']['py_modules']
            if isinstance(mod, str):
                mod = [mod]
            if isinstance(mod, list):
                # relative module paths are resolved against the YAML file's directory
                mod = [m if os.path.isabs(m) else os.path.join(os.path.dirname(source), m) for m in mod]
                PathImporter.add_modules(*mod)
            else:
                raise TypeError(f'{type(mod)!r} is not acceptable, only str or list are acceptable')
        tmp['metas']['separated_workspace'] = separated_workspace
        tmp['metas']['pea_id'] = pea_id
    else:
        raise EmptyExecutorYAML(f'{source} is empty? nothing to read from there')
    tmp = expand_dict(tmp)
    # round-trip through YAML text, restoring the masked '!' tags so the final
    # load actually constructs the tagged executor class
    stream = StringIO()
    yaml.dump(tmp, stream)
    tmp_s = stream.getvalue().strip().replace('__tag: ', '!')
    return yaml.load(tmp_s)
def reformat_yaml(yaml_string, show_file=False):
    """Reformat a yaml config string.

    Round-trips *yaml_string* through the MPF round-trip YAML loader so the
    output is consistently indented; ``show_file`` selects a flatter indent.
    """
    formatter = YAML(typ="mpf-rt", plug_ins=["mpf.file_interfaces.yaml_roundtrip"])
    if show_file:
        formatter.indent(mapping=2, sequence=2, offset=0)
    else:
        formatter.indent(mapping=2, sequence=4, offset=2)
    formatter.preserve_quotes = True
    formatter.width = 10000
    buffer = StringIO()
    formatter.dump(formatter.load(yaml_string), buffer)
    return buffer.getvalue()
def to_swarm_yaml(self, image: str = 'gnes/gnes:latest-alpine') -> str:
    """
    Generate the docker swarm YAML compose file

    :param image: the default GNES docker image
    :return: the generated YAML compose file
    """
    from ruamel.yaml import YAML, StringIO
    _yaml = YAML()
    swarm_yml = {'version': '3.4', 'services': {}}
    for k, v in self._service_nodes.items():
        # parse the service's defaults so only non-default CLI args end up in the command line
        defaults_kwargs, _ = service_map[v['service']]['parser']().parse_known_args(['--yaml_path', 'TrainableBase'])
        # NOTE: the comprehension's k/v shadow the loop's k/v inside the braces only;
        # outside it, k is still the service name and v the service node
        non_default_kwargs = {k: v for k, v in vars(v['parsed_args']).items() if getattr(defaults_kwargs, k) != v}
        # presumably yaml_path may have been parsed into a non-string (e.g. a class); fall back
        # to the raw kwarg — TODO confirm against service_map parsers
        if not isinstance(non_default_kwargs.get('yaml_path', ''), str):
            non_default_kwargs['yaml_path'] = v['kwargs']['yaml_path']
        # num_parallel is translated to a swarm 'deploy.replicas' entry, not a CLI flag
        num_replicas = None
        if 'num_parallel' in non_default_kwargs:
            num_replicas = non_default_kwargs.pop('num_parallel')
        swarm_yml['services'][k] = {
            'image': v['kwargs'].get('image', image),
            'command': '%s %s' % (service_map[v['service']]['cmd'],
                                  ' '.join(['--%s %s' % (k, v) for k, v in non_default_kwargs.items()]))
        }
        if num_replicas and num_replicas > 1:
            swarm_yml['services'][k]['deploy'] = {'replicas': num_replicas}
    stream = StringIO()
    _yaml.dump(swarm_yml, stream)
    return stream.getvalue().strip()
def update(log, data, deployment):
    """Apply probe, label and resource updates to every YAML document in *data*.

    *data* is a multi-document YAML string separated by ``---``. Documents that
    fail to parse are logged and passed through unchanged.
    """
    yaml = YAML()
    yaml.default_flow_style = False
    yaml.preserve_quotes = True
    processed_data = []
    for doc in data.split("---\n"):
        doc = doc.strip()
        if not doc:
            continue
        try:
            doc = yaml.load(doc)
            update_probes(log, doc, deployment)
            update_labels(log, doc, deployment)
            update_resources(log, doc, deployment)
            buffer = StringIO()
            yaml.dump(doc, buffer)
            doc = buffer.getvalue()
        except DuplicateKeyError as e:
            log.error(f'...... Constructor error: {e}')
            log.error(f'...... data: {doc}')
        except ScannerError as e:
            log.error(f'...... Scanner error: {e}')
            log.error(f'...... data: {doc}')
        except ParserError as e:
            log.error(f'...... Parser error: {e}')
            log.error(f'...... data: {doc}')
        processed_data.append(doc)
    return "---\n".join(processed_data)
def __init__(self, args):
    """Build the composer's layer graph from the YAML referenced by ``args.yaml_path``.

    :param args: parsed CLI namespace; must provide ``yaml_path`` (an open stream),
        ``name``, ``port`` and ``verbose``
    :raises ValueError: when a service entry fails :meth:`check_fields`
    """
    self.args = args
    self.logger = set_logger(self.__class__.__name__, self.args.verbose)
    # args.yaml_path is an already-open stream; the with-statement closes it after reading
    with args.yaml_path:
        tmp = _yaml.load(args.yaml_path)
        # keep a normalized round-tripped copy of the original YAML for later reference
        stream = StringIO()
        _yaml.dump(tmp, stream)
        self.original_yaml = stream.getvalue().strip()
    # YAML values win over CLI defaults
    self._name = tmp.get('name', args.name)
    self._port = tmp.get('port', args.port)
    self._networks = tmp.get('networks', {})
    self._volumes = tmp.get('volumes', {})
    self._layers = []  # type: List['YamlComposer.Layer']
    self._num_layer = 0
    if 'services' in tmp:
        # layer 0 is always the gRPC frontend
        self.add_layer()
        self.add_comp(CommentedMap({'name': 'gRPCFrontend', 'grpc_port': self._port}))
        for comp in tmp['services']:
            self.add_layer()
            if isinstance(comp, list):
                # a list entry means several components share one layer
                for c in comp:
                    self.add_comp(c)
            elif self.check_fields(comp):
                self.add_comp(comp)
            else:
                raise ValueError(comp)
    else:
        self.logger.error('yaml file defines an empty graph! no "component" field exists!')
def test_train_nlu_success(
    rasa_app: SanicTestClient,
    default_stack_config: Text,
    default_nlu_data: Text,
    default_domain_path: Text,
    tmp_path: Path,
):
    """End-to-end check that POSTing combined YAML training data to /model/train succeeds.

    Merges domain, config and NLU YAML files into one payload, trains via the
    HTTP API, then unpacks the returned model and verifies its fingerprint.
    """
    domain_data = rasa.shared.utils.io.read_yaml_file(default_domain_path)
    config_data = rasa.shared.utils.io.read_yaml_file(default_stack_config)
    nlu_data = rasa.shared.utils.io.read_yaml_file(default_nlu_data)
    # combine all data into our payload
    payload = {key: val for d in [domain_data, config_data, nlu_data] for key, val in d.items()}
    data = StringIO()
    rasa.shared.utils.io.write_yaml(payload, data)
    _, response = rasa_app.post(
        "/model/train",
        data=data.getvalue(),
        headers={"Content-type": rasa.server.YAML_CONTENT_TYPE},
    )
    assert response.status == 200
    # save model to temporary file
    model_path = str(tmp_path / "model.tar.gz")
    with open(model_path, "wb") as f:
        f.write(response.body)
    # unpack model and ensure fingerprint is present
    model_path = unpack_model(model_path)
    assert os.path.exists(os.path.join(model_path, "fingerprint.json"))
def build_dockerswarm(all_layers: List['YamlComposer.Layer'], docker_img: str = 'gnes/gnes:latest',
                      volumes: Dict = None, networks: Dict = None) -> str:
    """Render the layer graph as a docker-swarm compose YAML string.

    Starting from the bundled ``gnes-swarm.yml`` template, one service entry is
    generated per component; services are wired together by matching
    ``port_in``/``port_out`` between adjacent layers (wrapping around at the ends).

    :param all_layers: the composed layers, each holding its components
    :param docker_img: docker image used for every generated service
    :param volumes: optional top-level ``volumes`` section to embed
    :param networks: optional top-level ``networks`` section to embed
    :return: the swarm compose file contents, stripped of trailing whitespace
    """
    with resource_stream('gnes', '/'.join(('resources', 'compose', 'gnes-swarm.yml'))) as r:
        swarm_lines = _yaml.load(r)
    config_dict = {}
    for l_idx, layer in enumerate(all_layers):
        for c_idx, c in enumerate(layer.components):
            c_name = '%s%d%d' % (c['name'], l_idx, c_idx)
            # only known CLI args for this component; values with spaces get quoted
            args = ['--%s %s' % (a, str(v) if ' ' not in str(v) else ('"%s"' % str(v)))
                    for a, v in c.items()
                    if a in YamlComposer.comp2args[c['name']] and a != 'yaml_path' and v]
            if 'yaml_path' in c and c['yaml_path'] is not None:
                if c['yaml_path'].endswith('.yml') or c['yaml_path'].endswith('.yaml'):
                    # file-based yaml is mounted via a swarm config object
                    args.append('--yaml_path /%s_yaml' % c_name)
                    config_dict['%s_yaml' % c_name] = {'file': c['yaml_path']}
                else:
                    args.append('--yaml_path %s' % c['yaml_path'])
            # downstream layer (wraps to layer 0 after the last layer)
            if l_idx + 1 < len(all_layers):
                next_layer = all_layers[l_idx + 1]
                _l_idx = l_idx + 1
            else:
                next_layer = all_layers[0]
                _l_idx = 0
            host_out_name = ''
            for _c_idx, _c in enumerate(next_layer.components):
                if _c['port_in'] == c['port_out']:
                    host_out_name = '%s%d%d' % (_c['name'], _l_idx, _c_idx)
                    break
            # upstream layer (wraps to the last layer before layer 0)
            if l_idx - 1 >= 0:
                last_layer = all_layers[l_idx - 1]
                _l_idx = l_idx - 1
            else:
                last_layer = all_layers[-1]
                _l_idx = len(all_layers) - 1
            host_in_name = ''
            for _c_idx, _c in enumerate(last_layer.components):
                if _c['port_out'] == c['port_in']:
                    host_in_name = '%s%d%d' % (_c['name'], _l_idx, _c_idx)
                    break
            # non-BIND sockets need an explicit peer hostname
            if 'BIND' not in c['socket_out']:
                args.append('--host_out %s' % host_out_name)
            if 'BIND' not in c['socket_in']:
                args.append('--host_in %s' % host_in_name)
            cmd = '%s %s' % (YamlComposer.comp2file[c['name']], ' '.join(args))
            swarm_lines['services'][c_name] = CommentedMap({
                'image': docker_img,
                'command': cmd,
            })
            rep_c = YamlComposer.Layer.get_value(c, 'replicas')
            if rep_c > 1:
                swarm_lines['services'][c_name]['deploy'] = CommentedMap({
                    'replicas': YamlComposer.Layer.get_value(c, 'replicas'),
                    'restart_policy': {
                        'condition': 'on-failure',
                        'max_attempts': 3,
                    }
                })
            if 'yaml_path' in c and c['yaml_path'] is not None \
                    and (c['yaml_path'].endswith('.yml') or c['yaml_path'].endswith('.yaml')):
                swarm_lines['services'][c_name]['configs'] = ['%s_yaml' % c_name]
            if c['name'] == 'Frontend':
                swarm_lines['services'][c_name]['ports'] = ['%d:%d' % (c['grpc_port'], c['grpc_port'])]
    if volumes:
        swarm_lines['volumes'] = volumes
    if networks:
        # fix: originally assigned `volumes` here (copy-paste bug), silently
        # dropping the caller's networks section
        swarm_lines['networks'] = networks
    swarm_lines['configs'] = config_dict
    stream = StringIO()
    _yaml.dump(swarm_lines, stream)
    return stream.getvalue().strip()
def yaml_spec(self):
    """Serialize this Flow object to its YAML representation and return it as a string."""
    yaml.register_class(Flow)
    buffer = StringIO()
    yaml.dump(self, buffer)
    spec = buffer.getvalue()
    return spec.strip()
def log(self, key, data, dtype, options: LogOptions = None):
    """
    handler function for writing data to the server. Can be called directly.

    :param key: path of the target file, relative to ``self.data_dir``
    :param data: payload to write; its accepted type depends on ``dtype``
        (picklable object for "log", bytes for "byte", str for "text*"/"yaml*",
        ndarray for "image*")
    :param dtype: one of "log", "byte", "text*", "yaml*", "image*"
    :param options: optional LogOptions; ``overwrite`` selects write vs append
        mode, ``write_mode == 'key'`` merges into an existing yaml document
    :return: None
    """
    # todo: overwrite mode is not tested and not in-use.
    write_mode = "w" if options and options.overwrite else "a"
    if dtype == "log":
        abs_path = os.path.join(self.data_dir, key)
        # EAFP: retry once after creating the missing parent directory
        try:
            with open(abs_path, write_mode + 'b') as f:
                dill.dump(data, f)
        except FileNotFoundError:
            os.makedirs(os.path.dirname(abs_path))
            with open(abs_path, write_mode + 'b') as f:
                dill.dump(data, f)
    if dtype == "byte":
        abs_path = os.path.join(self.data_dir, key)
        try:
            with open(abs_path, write_mode + 'b') as f:
                f.write(data)
        except FileNotFoundError:
            os.makedirs(os.path.dirname(abs_path))
            with open(abs_path, write_mode + 'b') as f:
                f.write(data)
    elif dtype.startswith("text"):
        abs_path = os.path.join(self.data_dir, key)
        try:
            with open(abs_path, write_mode + "+") as f:
                f.write(data)
        except FileNotFoundError:
            os.makedirs(os.path.dirname(abs_path))
            with open(abs_path, write_mode + "+") as f:
                f.write(data)
    elif dtype.startswith("yaml"):
        import ruamel.yaml
        if ruamel.yaml.version_info < (0, 15):
            # legacy ruamel API: module-level functions
            yaml = ruamel.yaml
            StringIO = ruamel.yaml.StringIO
            load_fn = yaml.safe_load
        else:
            from ruamel.yaml import YAML, StringIO
            yaml = YAML()
            # fix: was `yaml.explict_start` (typo) — the attribute just sat
            # unused on the instance and the '---' document marker was never emitted
            yaml.explicit_start = True
            load_fn = yaml.load
        stream = StringIO()
        yaml.dump(data, stream)
        output = stream.getvalue()
        abs_path = os.path.join(self.data_dir, key)
        try:
            with open(abs_path, write_mode + "+") as f:
                # fix: guard options for None, consistent with the overwrite
                # check above (previously raised AttributeError when options=None)
                if options and options.write_mode == 'key':
                    d = load_fn('\n'.join(f))
                    if d is not None:
                        # NOTE(review): output is a yaml *string* here;
                        # dict.update(str) raises ValueError — confirm the
                        # intended merge semantics before relying on this path
                        d.update(output)
                        output = d
                f.write(output)
        except FileNotFoundError:
            os.makedirs(os.path.dirname(abs_path))
            with open(abs_path, write_mode + "+") as f:
                if options and options.write_mode == 'key':
                    d = load_fn('\n'.join(f))
                    if d is not None:
                        d.update(output)
                        output = d
                f.write(output)
    elif dtype.startswith("image"):
        abs_path = os.path.join(self.data_dir, key)
        if "." not in key:
            abs_path = abs_path + ".png"
        from PIL import Image
        assert data.dtype in ALLOWED_TYPES, "image datatype must be one of {}".format(ALLOWED_TYPES)
        # squeeze a trailing singleton channel so PIL treats it as grayscale
        if len(data.shape) == 3 and data.shape[-1] == 1:
            data.resize(data.shape[:-1])
        im = Image.fromarray(data)
        try:
            im.save(abs_path)
        except FileNotFoundError:
            os.makedirs(os.path.dirname(abs_path))
            im.save(abs_path)
def dumps(self, training_data: "TrainingData") -> Text:
    """Turns TrainingData into a string."""
    buffer = StringIO()
    self.dump(buffer, training_data)
    return buffer.getvalue()
def pb_to_yaml(message):
    """Convert a protobuf message to YAML text via its JSON representation."""
    json_text = MessageToJson(message)
    # local renamed from `io` to avoid shadowing the stdlib io module
    buffer = StringIO()
    YAML().dump(json_text, buffer)
    return buffer.getvalue()
def to_dict(raw_yaml):
    """Parse a raw YAML string into a Python object."""
    stream = StringIO(raw_yaml)
    parser = YAML()
    return parser.load(stream)
def dumps(self, data) -> str:
    """Serialize *data* to an in-memory string instead of a file or stdout."""
    buffer = StringIO()
    self.dump(data, buffer, transform=None)
    return buffer.getvalue()
def loads(self, string: str):
    """Parse YAML from an in-memory string by wrapping it in a stream."""
    stream = StringIO(string)
    return self.load(stream)