def build(ctx, explainer_filepath, dashboard_filepath):
    """Build an explainer from an explainer.yaml config file.

    Falls back to ./explainer.yaml when no path is given. Optionally uses a
    dashboard yaml to decide which explainer properties to calculate.
    """
    click.echo(explainer_ascii)
    if explainer_filepath is None:
        # default to an explainer.yaml in the current working directory
        if (Path().cwd() / "explainer.yaml").exists():
            explainer_filepath = Path().cwd() / "explainer.yaml"
        else:
            click.echo("No argument given to explainerdashboard build and "
                       "could not find an explainer.yaml. Aborting.")
            return
    if str(explainer_filepath).endswith(".yaml"):
        # use a context manager: the original leaked the handle from open()
        with open(str(explainer_filepath), "r") as f:
            explainer_config = yaml.safe_load(f)
        click.echo(
            f"explainerdashboard ===> Building {explainer_config['explainer']['explainerfile']}"
        )
        if (dashboard_filepath is not None
                and str(dashboard_filepath).endswith(".yaml")
                and Path(dashboard_filepath).exists()):
            click.echo(
                f"explainerdashboard ===> Using {dashboard_filepath} to calculate explainer properties"
            )
            with open(str(dashboard_filepath), "r") as f:
                dashboard_config = yaml.safe_load(f)
        else:
            dashboard_config = None
        # the original printed the "Building ..." status a second time via
        # print(); emit each status once, consistently through click.echo
        build_and_dump_explainer(explainer_config, dashboard_config)
        click.echo("explainerdashboard ===> Build finished!")
    return
def test_deploy(self, mock_adapter):
    """deploy() updates an existing pool and creates an unknown one."""
    mock_adapter.return_value.get_pools.side_effect = lambda x: ["foo"]
    from dcosdeploy.modules import edgelb
    manager = edgelb.EdgeLbPoolsManager()
    # "foo" is reported as existing, so deploy() takes the update path
    existing = edgelb.EdgeLbPool("edgelb/api", "foo", oyaml.safe_load(POOL_DEF), "foobar")
    self.assertTrue(manager.deploy(existing))
    # NOTE: update_pool itself is not asserted here, only the template update
    # mock_adapter.return_value.update_pool.assert_called_once()
    mock_adapter.return_value.update_pool_template.assert_called_with("edgelb/api", "foo", "foobar")
    # "bar" is unknown, so deploy() takes the create path
    created = edgelb.EdgeLbPool("edgelb/api", "bar", oyaml.safe_load(POOL_DEF), None)
    self.assertTrue(manager.deploy(created))
def test_settings(mock_init_config, mock_init_history, mock_get_config, mock_get_completer, mock_save_config):
    """End-to-end exercise of kafkashell.settings.Settings: initialization,
    toggling flags, cluster switching, and save-on-exit behavior.

    The assertions are strictly order-dependent: each section mutates the
    shared ``settings`` instance that later sections rely on.
    """
    with open("tests/data/test-config.yaml") as f:
        with open("tests/data/test-modified-config.yaml") as fm:
            config_json = yaml.safe_load(f)
            modified_json = yaml.safe_load(fm)
            mock_get_completer.return_value = {"commands": {}}
            mock_get_config.return_value = config_json
            # test init
            settings = kafkashell.settings.Settings()
            mock_init_config.assert_called_once()
            mock_init_history.assert_called_once()
            mock_get_config.assert_called_once()
            mock_get_completer.assert_called_once()
            assert settings.enable_help is True
            assert settings.enable_auto_complete is True
            assert settings.enable_auto_suggest is True
            assert settings.cluster == "local"
            # test set_enable_help
            settings.set_enable_help(False)
            assert settings.enable_help is False
            # test set_enable_fuzzy_search
            settings.set_enable_fuzzy_search(False)
            assert settings.enable_fuzzy_search is False
            # test set_next_cluster & get_cluster_details
            settings.set_next_cluster()
            assert settings.cluster == "test"
            assert settings.get_cluster_details() == config_json["clusters"]["test"]
            # test save_settings: the in-memory state mutated above must match
            # the pre-recorded "modified" fixture exactly
            settings.save_settings()
            mock_save_config.assert_called_once_with(modified_json)
            # test save_settings when enableSaveOnExit is false
            mock_save_config.reset_mock()
            settings.enable_save_on_exit = False
            settings.save_settings()
            assert not mock_save_config.called
            # test things can change back
            settings.set_enable_help(True)
            assert settings.enable_help is True
            settings.set_enable_fuzzy_search(True)
            assert settings.enable_fuzzy_search is True
            settings.set_next_cluster()
            assert settings.cluster == "local"
            assert settings.get_cluster_details() == config_json["clusters"]["local"]
def get_template(yaml_path, base_default=False):
    """Load a deployment YAML template as a dict.

    When *base_default* is true and a packaged default with the same base
    name exists, it is loaded first and then deep-merged with the overrides
    found at *yaml_path*. Always returns a dict (possibly empty).
    """
    merged = {}
    default_path = join(HERE, DEPLOYMENT_CONFIGURATION_PATH, basename(yaml_path))
    if base_default and exists(default_path):
        with open(default_path) as fh:
            merged = yaml.safe_load(fh)
    if exists(yaml_path):
        with open(yaml_path) as fh:
            overrides = yaml.safe_load(fh)
        # an empty/blank yaml file loads as None; skip the merge in that case
        if overrides:
            merged = dict_merge(merged or {}, overrides)
    return merged or {}
def test_dry_run(self, mock_adapter):
    """dry_run() flags missing and modified pools, but not unchanged ones."""
    mock_adapter.return_value.get_pools.side_effect = lambda x: ["foo"]
    mock_adapter.return_value.get_pool.side_effect = \
        lambda x, y: oyaml.safe_load(SERVER_POOL_DEF)
    from dcosdeploy.modules import edgelb
    manager = edgelb.EdgeLbPoolsManager()
    # pool that does not exist yet -> change pending
    missing = edgelb.EdgeLbPool("edgelb/api", "bar", dict(), None)
    self.assertTrue(manager.dry_run(missing))
    # existing pool with an identical definition -> nothing to do
    unchanged = edgelb.EdgeLbPool("edgelb/api", "foo", oyaml.safe_load(POOL_DEF), None)
    self.assertFalse(manager.dry_run(unchanged))
    # existing pool whose definition differs -> change pending
    changed = edgelb.EdgeLbPool("edgelb/api", "foo", oyaml.safe_load(POOL_DEF_CHANGED), None)
    self.assertTrue(manager.dry_run(changed))
def update_download_counts(self, force=False):
    """Fetch per-module download counts from the store, caching the result.

    Does nothing when counts were already fetched, unless *force* is set.
    """
    if self._counts_fetched and not force:
        return
    counts_url = self._store_path_builder.download_counts()
    raw = su.get_file_to_string(counts_url)
    if raw != '':
        self.download_counts = yaml.safe_load(raw).get('modules', {})
    # mark fetched even for an empty payload so we do not refetch every call
    self._counts_fetched = True
def create_context(config_var_file):
    """Build a jinja context dict from a YAML (or JSON) snippet-variables file.

    Returns None when the file is missing (defaults will be used); exits the
    process when the file can be opened but parsed as neither YAML nor JSON.
    """
    # Probe readability without leaking a handle: the original called open()
    # purely as an existence check and never closed the returned file object.
    try:
        with open(config_var_file, 'r'):
            pass
    except IOError as ioe:
        fail_message = """{}Note: {} not found. Default variables for snippet stack will be used.{}
""".format(Fore.YELLOW, config_var_file, Style.RESET_ALL)
        print(fail_message)
        return None
    try_json = False
    with open(config_var_file, 'r') as f:
        try:
            variables = oyaml.safe_load(f.read())
        except ScannerError:
            # not valid YAML; fall back to JSON below
            try_json = True
    if try_json:
        with open(config_var_file, 'r') as f:
            try:
                variables = json.load(f)
            except json.decoder.JSONDecodeError:
                print(
                    "Configuration file could not be decoded as YAML or JSON!")
                exit(1)
    # grab the metadata values and convert to key-based dictionary
    jinja_context = dict()
    for snippet_var in variables['variables']:
        jinja_context[snippet_var['name']] = snippet_var['value']
    return jinja_context
def init_from_existing_tutorial(self, tuto_name):
    """Init a tutorial instance from an existing tutorial (data library and tutorial.md)."""
    self.name = tuto_name
    self.set_dir_name()
    if not self.exists():
        raise Exception("The tutorial %s does not exists. It should be created" % self.name)
    # split tutorial.md into its yaml frontmatter and the markdown body
    with open(self.tuto_fp, "r") as tuto_f:
        tuto_content = tuto_f.read()
    regex = r'^---\n(?P<metadata>[\s\S]*)\n---(?P<body>[\s\S]*)'
    match = re.search(regex, tuto_content)
    if not match:
        raise Exception("No metadata found at the top of the tutorial")
    metadata = yaml.safe_load(match.group("metadata"))
    # copy the frontmatter fields onto the instance
    for field in ("title", "zenodo_link", "questions", "objectives",
                  "time_estimation", "key_points", "contributors"):
        setattr(self, field, metadata[field])
    # the tutorial content
    self.body = match.group("body")
    # get the data library
    self.init_data_lib()
def test_anchors_and_references():
    """safe_load resolves YAML anchors (&), references (*) and merge keys (<<)."""
    text = """
defaults:
  all: &all
    product: foo
  development: &development
    <<: *all
    profile: bar
development:
  platform:
    <<: *development
    host: baz
"""
    expected_load = {
        "defaults": {
            "all": {"product": "foo"},
            "development": {"product": "foo", "profile": "bar"},
        },
        "development": {
            "platform": {"host": "baz", "product": "foo", "profile": "bar"}
        },
    }
    assert yaml.safe_load(text) == expected_load
def load(self, filename):
    """Populate names, data, name and the server count from a YAML file."""
    with open(path_expand(filename), 'r') as stream:
        doc = yaml.safe_load(stream)
    self.names = doc["names"]
    self.data = doc["data"]
    self.name = doc["name"]
    # one server per entry in the names list
    self.servers = len(self.names)
def load(self, path):
    """Load the name-counter database at *path*, creating a default when absent."""
    data = {"wrong": "True"}
    if os.path.exists(path):
        # existing database: read it (an empty file loads as an empty dict)
        with open(path, 'rb') as dbfile:
            data = yaml.safe_load(dbfile) or dict()
    else:
        parent = os.path.dirname(path)
        if not os.path.exists(parent):
            os.makedirs(parent)
        config = Config()
        user = config["cloudmesh.profile.user"]
        if user == "TBD":
            print(
                "WARNING: please set cloudmesh.profile.user we found TBD")
        # seed the database with its initial counter state
        data = {
            'counter': 1,
            'path': path,
            'kind': 'vm',
            'schema': "{user}-{kind}-{counter}",
            'user': user
        }
        self.flush(data)
    return data
def load(self, path):
    """Load the counter database at *path*; seed and persist a default if missing."""
    data = {"wrong": "True"}
    if os.path.exists(path):
        with open(path, 'rb') as dbfile:
            # an empty file loads as None -> use an empty dict instead
            data = yaml.safe_load(dbfile) or dict()
    else:
        parent = os.path.dirname(path)
        if not os.path.exists(parent):
            os.makedirs(parent)
        config = Config()
        user = config["cloudmesh.profile.user"]
        # initial counter state for this user
        data = {
            'counter': 1,
            'path': path,
            'schema': "{user}-vm-{counter}",
            'user': user
        }
        self.flush(data)
    return data
def load_music(self, path):
    """Load the area music list from a YAML file, prefixing song names with a
    per-file folder when 'use_unique_folder' is set."""
    try:
        with open(path, 'r', encoding='utf-8') as fh:
            entries = yaml.safe_load(fh)
        folder_prefix = ''
        for entry in entries:
            # deprecated, use 'replace_music' area pref instead
            # if 'replace' in entry:
            #     self.replace_music = entry['replace'] == True
            if 'use_unique_folder' in entry and entry[
                    'use_unique_folder'] == True:
                folder_prefix = os.path.splitext(os.path.basename(path))[0] + '/'
            if 'category' not in entry:
                continue
            if 'songs' in entry:
                for track in entry['songs']:
                    track['name'] = folder_prefix + track['name']
        self.music_list = entries
    except ValueError:
        raise
    except AreaError:
        raise
def main():
    """Generate one .world file per coordinate combination described by the
    requested yaml, via intermediate xacro files."""
    requested_path = rospy.get_param('requested')
    # bare file name: text between the final '/' and the '.yaml' extension
    yaml_name = requested_path[requested_path.rfind('/') + 1:
                               requested_path.rfind('.yaml')]
    # get the yaml as a dict; the original leaked the file handle from open()
    with open(requested_path) as s:
        master = yaml.safe_load(s)
    # get list of all coordinates that the master dict maps out
    coordinates = linear_combinations(master)
    # create a world xacro and subsequent world file for each coordinate
    for num, i in enumerate(coordinates):
        create_xacro_file(
            xacro_target=rospy.get_param('world_xacro_target') +
            yaml_name + str(num) + '.world.xacro',
            requested_macros=world_gen(coordinate=i, master=master),
            boiler_plate_top='<?xml version="1.0" ?>\n' +
            '<sdf version="1.6" ' +
            'xmlns:xacro="http://ros.org/wiki/xacro">\n' +
            '<!-- COORDINATE: ' + str(i) + ' -->\n' +
            '<world name="robotx_example_course">\n' +
            '  <xacro:include filename="$(find vrx_gazebo)' +
            '/worlds/xacros/include_all_xacros.xacro" />\n' +
            '  <xacro:include_all_xacros />\n',
            boiler_plate_bot='</world>\n</sdf>')
        os.system('rosrun xacro xacro --inorder -o' +
                  rospy.get_param('world_target') + yaml_name + str(num) +
                  '.world ' +
                  rospy.get_param('world_xacro_target') + yaml_name +
                  str(num) + '.world.xacro')
    # BUG FIX: the original used a Python 2 print *statement*, a syntax error
    # under the Python 3 features (f-strings) used elsewhere in this file
    print('All ', len(coordinates), ' worlds generated')
def load_yaml(filepath):
    """Load a yaml file into a python dict.

    *filepath* may be a local path or a URL; URLs are fetched and decoded as
    UTF-8 before parsing.
    """
    def read_yaml_file(filepath):
        """
        Read a YAML file

        :param str filepath: path to the file to read
        :return dict: read data
        """
        with open(filepath, "r") as f:
            data = yaml.safe_load(f)
        return data

    if is_url(filepath):
        _LOGGER.debug(f"Got URL: {filepath}")
        try:  # python3
            from urllib.request import urlopen
        except ImportError:  # python2 fallback; narrowed from a bare except
            from urllib2 import urlopen
        # HTTP errors propagate to the caller unchanged (the original wrapped
        # urlopen in a try/except that only re-raised the same exception)
        response = urlopen(filepath)
        data = response.read()  # a `bytes` object
        text = data.decode("utf-8")
        return yaml.safe_load(text)
    else:
        return read_yaml_file(filepath)
def load_character_data(self, path="config/character_data.yaml"):
    """
    Load all the character-specific information such as movement delay,
    keys, etc.
    :param path: filepath to the YAML file.
    :raises AreaError: when the file is missing/unreadable or its contents
        cannot be converted.
    """
    try:
        if not os.path.isfile(path):
            # BUG FIX: the original used a bare `raise` with no active
            # exception, which surfaces as RuntimeError; raise an explicit
            # error instead (still converted to AreaError below)
            raise FileNotFoundError(path)
        with open(path, "r") as chars:
            data = yaml.safe_load(chars)
    except Exception:
        raise AreaError(
            f"Hub {self.name} trying to load character data: File path {path} is invalid!"
        )
    try:
        for char in data.copy():
            # Convert the old numeric way to store character data into
            # character folder based one
            if isinstance(char, int) and self.is_valid_char_id(char):
                data[self.char_list[char]] = data.pop(char)
        self.character_data = data
    except Exception:
        raise AreaError(
            "Something went wrong while loading the character data!")
def test_manyfesto(self):
    """Run every bundled test folder through read() and compare with the
    recorded correct_output.yaml."""
    # deal with path issues
    import os
    from pathlib import Path
    manyfesto_path = os.path.dirname(
        os.path.dirname(os.path.abspath(__file__)))
    import sys
    if manyfesto_path not in sys.path:
        # add parent dir to paths
        print('Adding ', manyfesto_path, "to system path.")
        sys.path.append(manyfesto_path)
    import oyaml
    from manyfesto import read
    print('\n-------------- Tests Output -----------------')
    for test_num in range(num_tests):
        case_dir = Path(manyfesto_path) / Path(r"tests/test" + str(test_num) + "/")
        container_dir = case_dir / 'container'
        expected_file = case_dir / Path('correct_output.yaml')
        actual = read(str(container_dir))
        with expected_file.open('r') as f:
            expected = oyaml.safe_load(f)
        assert actual == expected, "Error in matching Test " + str(test_num) + " output: \n %s" % \
            oyaml.dump(actual, default_flow_style=False)
        print("Ran test " + str(test_num) + ".")
def update_photo_list(self, edges_list, photo_list_path, photo_folder="."):
    """Prepend instagram posts newer than the current newest entry to the
    photo list file, re-export it, and rebuild the site.

    :param edges_list: instagram graph edges (each with a "node" dict)
    :param photo_list_path: path to the json/yaml photo list (newest first)
    :param photo_folder: folder prefix for downloaded photos
    :return: the (possibly updated) photo list dict
    """
    try:
        with open(photo_list_path, "r") as f:
            if "json" in photo_list_path:
                photo_list = json.load(f)
            else:
                photo_list = yaml.safe_load(f)
    except Exception as e:
        self.log.e("update_photo_list", f"Error opening photo list file at {photo_list_path}:\n{e}")
        # BUG FIX: the original fell through after logging and crashed below
        # with NameError on the unbound photo_list; surface the real failure
        raise
    new_posts_list = []
    # list is newest-first, so entry 0 holds the latest known timestamp
    last_post = photo_list["photos"][0]
    last_timestamp = last_post["timestamp"]
    for edge in edges_list:
        node = edge["node"]
        self.log.i("update_photo_list",f"Last post timestamp: {last_timestamp}\nLast downloaded post {node['taken_at_timestamp']}")
        if "taken_at_timestamp" in node and int(node["taken_at_timestamp"]) > int(last_timestamp):
            self.log.i("update_photo_list",f"Found new post! {node['taken_at_timestamp']} {self.get_post_metadata(node, ['location','name'])}")
            new_post = self.post_from_metadata(node, photo_folder)
            new_posts_list.append(new_post)
    if len(new_posts_list) > 0:
        filtered_list = self.filter_sort_photos(new_posts_list, excluded_keys=["datetime_obj"])
        self.log.i("update_photo_list",f"New posts to add {len(filtered_list)}")
        self.log.i("update_photo_list",f"Total posts before: {len(photo_list['photos'])}")
        # insert in reverse so the newest post ends up at index 0
        for i in reversed(range(0, len(filtered_list))):
            photo_list["photos"].insert(0, filtered_list[i])
        self.log.i("update_photo_list",f"Total posts after: {len(photo_list['photos'])}")
        self.export_to_file(photo_list, photo_list_path)
        self.log.i("update_photo_list",f"Updated file {photo_list_path}")
        self.log.s("update_photo_list",f"Added {len(filtered_list)} new instagram posts to your website! {str(filtered_list)}")
        os.system("sudo JEKYLL_ENV=production bundle exec jekyll build")
    else:
        self.log.s("update_photo_list",f"All up to date!")
    return photo_list
async def loadfilter(self, filterpath=None, filtername=None, filterstring=None, filter=None):
    """Resolve self.filter from the first available source, in precedence
    order: explicit dict > JSON string > named filter in the filter table >
    filter file (yaml or json). Falls back to an empty dict.
    """
    # remember any explicitly supplied sources on the instance
    # (identity checks replace the original `!= None` comparisons)
    if filterpath is not None:
        self.filterpath = filterpath
    if filtername is not None:
        self.filtername = filtername
    if filterstring is not None:
        self.filterstring = filterstring
    if filter is not None:
        self.filter = filter
    if self.filter:
        pass
    elif self.filterstring is not None:
        # tolerate single-quoted pseudo-JSON
        self.filterstring = self.filterstring.replace("'", '"')
        self.filter = json.loads(self.filterstring)
    elif self.filtername is not None and self.filtertable_exists():
        # NOTE(review): filtername is interpolated directly into the SQL
        # string; switch to a parameterized query if it can contain
        # untrusted input
        await self.cursor.execute('select criteria from ' + self.filtertable + ' where name="' + self.filtername + '"')
        criteria = await self.cursor.fetchone()
        if criteria is not None:
            self.filter = json.loads(criteria[0])
    elif self.filterpath is not None and os.path.exists(self.filterpath):
        with open(self.filterpath) as f:
            # pick the parser from the file extension
            ftype = self.filterpath.split('.')[-1]
            if ftype in ['yml', 'yaml']:
                self.filter = yaml.safe_load(f)
            elif ftype in ['json']:
                self.filter = json.load(f)
    if self.filter is None:
        self.filter = {}
def is_latest():
    """Return True when conandata.yml already lists the newest released version."""
    newest = get_latest_release_version()
    with open('conandata.yml', 'r') as fh:
        conandata = yaml.safe_load(fh)
    # membership test against the version keys under "sources"
    return newest in conandata["sources"].keys()
def parse_conf_file(self, conf_path):
    """Load the annotator yaml configuration, validate it, and copy every
    entry onto this instance as an attribute.

    :raises ConfigurationError: on a missing required key, invalid level,
        or invalid input_format.
    """
    # close the config file deterministically (the original leaked the handle)
    with open(conf_path) as conf_file:
        d = yaml.safe_load(conf_file)
    for k in self.required_conf_keys:
        if k not in d:
            err_msg = 'Required key "%s" not found in configuration' % k
            raise ConfigurationError(err_msg)
    if d['level'] in self.valid_levels:
        # the level's id column always leads the output columns
        d['output_columns'] = [self.id_col_defs[d['level']]
                               ] + d['output_columns']
    else:
        err_msg = '%s is not a valid level. Valid levels are %s' \
            % (d['level'], ', '.join(self.valid_levels))
        raise ConfigurationError(err_msg)
    if 'input_format' in d:
        if d['input_format'] not in self.valid_input_formats:
            err_msg = 'Invalid input_format %s, select from %s' \
                % (d['input_format'], ', '.join(self.valid_input_formats))
            # BUG FIX: the message was built but never raised, silently
            # accepting invalid input formats
            raise ConfigurationError(err_msg)
    else:
        # default format follows from the level
        if d['level'] == 'variant':
            d['input_format'] = 'crv'
        elif d['level'] == 'gene':
            d['input_format'] = 'crg'
    if 'input_columns' in d:
        id_col_name = self.id_col_defs[d['level']]['name']
        if id_col_name not in d['input_columns']:
            d['input_columns'].append(id_col_name)
    else:
        d['input_columns'] = self.default_input_columns[d['input_format']]
    for k, v in d.items():
        self.__dict__[k] = v
def data_yamls(path):
    """Parse the YAML file at *path*; print the error and return 2 on failure."""
    with open(path, "r") as fh:
        try:
            return yaml.safe_load(fh)
        except yaml.YAMLError as exc:
            print(exc)
            return 2
def load_data(self):
    """Detect attached USB devices (platform-specific) and parse them into
    self.devices via the yaml output of Printer.write."""
    self.background = '#64778d'
    # choose the probe that matches the host OS
    self.details = USB.get_from_diskutil() if os_is_mac() else USB.get_from_dmesg()
    columns = ["dev", "info", "formatted", "size", "active", "readable",
               "empty", "direct-access", "removable", "writeable"]
    titles = ["Path", "Info", "Formatted", "Size", "Plugged-in", "Readable",
              "Empty", "Access", "Removable", "Writeable"]
    rendered = Printer.write(self.details,
                             order=columns,
                             header=titles,
                             output="yaml")
    self.devices = yaml.safe_load(rendered)
def build_and_dump_explainer(explainer_config, dashboard_config=None):
    """Build the explainer, calculate its properties (via a dashboard config
    when one is available), and dump it to the configured explainerfile.
    """
    explainer = build_explainer(explainer_config)
    click.echo(
        "explainerdashboard ===> Calculating properties by building Dashboard..."
    )
    if dashboard_config is not None:
        ExplainerDashboard.from_config(explainer, dashboard_config)
    elif Path(explainer_config['explainer']['dashboard_yaml']).exists():
        click.echo(
            f"explainerdashboard ===> Calculating properties by building Dashboard from {explainer_config['explainer']['dashboard_yaml']}..."
        )
        # context manager fixes the leaked file handle in the original
        with open(str(explainer_config['explainer']['dashboard_yaml']), "r") as f:
            dashboard_config = yaml.safe_load(f)
        ExplainerDashboard.from_config(explainer, dashboard_config)
    else:
        # no dashboard config anywhere: calculate everything up front
        click.echo("explainerdashboard ===> Calculating all properties")
        explainer.calculate_properties()
    click.echo(
        f"explainerdashboard ===> Saving explainer to {explainer_config['explainer']['explainerfile']}..."
    )
    if (dashboard_config is not None and
            explainer_config['explainer']['explainerfile'] !=
            dashboard_config['dashboard']['explainerfile']):
        click.echo(
            "explainerdashboard ===> Warning explainerfile in explainer config and dashboard config do not match!"
        )
    explainer.dump(explainer_config['explainer']['explainerfile'])
    return
def load_music(self, path):
    """Load the hub music list from a YAML file, prefixing song names with a
    per-file folder when 'use_unique_folder' is set."""
    try:
        if not os.path.isfile(path):
            raise AreaError(
                f"Hub {self.name} trying to load music list: File path {path} is invalid!"
            )
        with open(path, "r", encoding="utf-8") as fh:
            entries = yaml.safe_load(fh)
        folder_prefix = ""
        for entry in entries:
            # deprecated, use 'replace_music' hub pref instead
            # if 'replace' in entry:
            #     self.replace_music = entry['replace'] is True
            if "use_unique_folder" in entry and entry[
                    "use_unique_folder"] is True:
                folder_prefix = os.path.splitext(os.path.basename(path))[0] + "/"
            if "category" not in entry:
                continue
            if "songs" in entry:
                for track in entry["songs"]:
                    track["name"] = folder_prefix + track["name"]
        self.music_list = entries
    except ValueError:
        raise
    except AreaError:
        raise
def get_cba_metadata(cba_tosca_meta_bytes: bytes) -> CbaMetadata:
    """Parse CBA TOSCA.meta file and get values from it.

    Args:
        cba_tosca_meta_bytes (bytes): TOSCA.meta file bytes.

    Raises:
        ValidationError: TOSCA Meta file has invalid format.

    Returns:
        CbaMetadata: Dataclass with CBA metadata
    """
    parsed = yaml.safe_load(cba_tosca_meta_bytes)
    # a valid TOSCA.meta parses to a mapping; anything else is rejected
    if not isinstance(parsed, dict):
        raise ValidationError("Invalid TOSCA Meta file")
    return CbaMetadata(
        tosca_meta_file_version=parsed.get("TOSCA-Meta-File-Version"),
        csar_version=parsed.get("CSAR-Version"),
        created_by=parsed.get("Created-By"),
        entry_definitions=parsed.get("Entry-Definitions"),
        template_name=parsed.get("Template-Name"),
        template_version=parsed.get("Template-Version"),
        template_tags=parsed.get("Template-Tags"),
    )
def get_content_header(lines: List[str]) -> Tuple[str, Dict[str, Any]]:
    """
    Get header of Jekyll file

    Args:
        lines: Lines of profile

    Returns:
        content (everything below header as a string),
        header (parsed yaml, i.e. as dictionary structure)
    """
    header_lines: List[str] = []
    content_lines: List[str] = []
    in_content = False
    for idx, raw_line in enumerate(lines):
        stripped = raw_line.strip()
        if idx == 0:
            # a Jekyll file must open with the frontmatter delimiter
            assert stripped == "---", lines
            continue
        if stripped == "---":
            # closing delimiter: everything after this is content
            in_content = True
            continue
        (content_lines if in_content else header_lines).append(stripped)
    content = "\n".join(content_lines)
    return content, yaml.safe_load("\n".join(header_lines))
def load_config_file(self):
    """Read config_corona.yaml (located next to this module) and return the
    parsed configuration dict."""
    module_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(module_dir, 'config_corona.yaml')) as fh:
        return oyaml.safe_load(fh)
def init_from_existing_tutorial(self, tuto_name):
    """Init a tutorial instance from an existing tutorial (data library and tutorial.md)."""
    self.name = tuto_name
    self.set_dir_name()
    if not self.exists():
        raise Exception(
            "The tutorial %s does not exists. It should be created" % self.name)
    # get the metadata information of the tutorial (from the top of the tutorial.md)
    with open(self.tuto_fp, "r") as tuto_f:
        tuto_content = tuto_f.read()
    # split the file into its yaml frontmatter ("metadata") and the rest ("body")
    regex = r'^---\n(?P<metadata>[\s\S]*)\n---(?P<body>[\s\S]*)'
    tuto_split_regex = re.search(regex, tuto_content)
    if not tuto_split_regex:
        raise Exception("No metadata found at the top of the tutorial")
    metadata = yaml.safe_load(tuto_split_regex.group("metadata"))
    # copy each frontmatter field onto the instance; a missing key raises KeyError
    self.title = metadata["title"]
    self.zenodo_link = metadata["zenodo_link"]
    self.questions = metadata["questions"]
    self.objectives = metadata["objectives"]
    self.time_estimation = metadata["time_estimation"]
    self.key_points = metadata["key_points"]
    self.contributors = metadata["contributors"]
    # the tutorial content
    self.body = tuto_split_regex.group("body")
    # get the data library
    self.init_data_lib()
def parse_conf_file(self, conf_path):
    """Load the annotator yaml configuration, validate it, and copy every
    entry onto this instance as an attribute.

    :raises ConfigurationError: on a missing required key, invalid level,
        or invalid input_format.
    """
    # close the config file deterministically (the original leaked the handle)
    with open(conf_path) as conf_file:
        d = yaml.safe_load(conf_file)
    for k in self.required_conf_keys:
        if k not in d:
            err_msg = 'Required key "%s" not found in configuration' % k
            raise ConfigurationError(err_msg)
    if d["level"] in self.valid_levels:
        # the level's id column always leads the output columns
        d["output_columns"] = [self.id_col_defs[d["level"]]
                               ] + d["output_columns"]
    else:
        err_msg = "%s is not a valid level. Valid levels are %s" % (
            d["level"],
            ", ".join(self.valid_levels),
        )
        raise ConfigurationError(err_msg)
    if "input_format" in d:
        if d["input_format"] not in self.valid_input_formats:
            err_msg = "Invalid input_format %s, select from %s" % (
                d["input_format"],
                ", ".join(self.valid_input_formats),
            )
            # BUG FIX: the message was built but never raised, silently
            # accepting invalid input formats
            raise ConfigurationError(err_msg)
    else:
        # default format follows from the level
        if d["level"] == "variant":
            d["input_format"] = "crv"
        elif d["level"] == "gene":
            d["input_format"] = "crg"
    if "input_columns" in d:
        id_col_name = self.id_col_defs[d["level"]]["name"]
        if id_col_name not in d["input_columns"]:
            d["input_columns"].append(id_col_name)
    else:
        d["input_columns"] = self.default_input_columns[d["input_format"]]
    for k, v in d.items():
        self.__dict__[k] = v
def handle(self, **options):
    """Deduplicate an OpenAPI spec against the shared common spec.

    Reads the spec at options["api-spec"], removes every component that is
    identical to its counterpart in settings.COMMON_SPEC, records a $ref to
    the common spec URL instead, and writes the result to options["output"].
    Silently returns when the common spec cannot be fetched.
    """
    source = options["api-spec"]
    output = options["output"]
    common_url = settings.COMMON_SPEC
    try:
        response = requests.get(common_url)
        response.raise_for_status()
        common_yaml = response.text
    except requests.exceptions.RequestException:
        # best-effort: without the common spec there is nothing to dedupe
        return
    common_spec = yaml.safe_load(common_yaml)
    common_components = common_spec["components"]
    with open(source, "r", encoding="utf8") as infile:
        spec = yaml.safe_load(infile)
    components = spec["components"]
    refs = {}
    # collect refs for every component that exactly matches the common spec
    for scope, scope_items in components.items():
        if scope not in common_components:
            continue
        # iterate a copy because matching items are deleted from the dict
        for item, item_spec in scope_items.copy().items():
            if item not in common_components[scope]:
                continue
            common_item_spec = common_components[scope][item]
            if item_spec == common_item_spec:
                # add ref to replace
                ref = f"#/components/{scope}/{item}"
                refs[ref] = f"{common_url}{ref}"
                # remove item from internal components
                del components[scope][item]
    # remove empty components
    for scope, scope_items in components.copy().items():
        if not scope_items:
            del components[scope]
    # replace all refs
    replace_refs(spec, refs)
    with open(output, "w", encoding="utf8") as outfile:
        yaml.add_representer(QuotedString, quoted_scalar)
        yaml.dump(spec, outfile, default_flow_style=False)
def load_yaml(filepath):
    """Load the content of a YAML file to a dictionary."""
    with open(filepath, "r") as fh:
        return yaml.safe_load(fh)