def do_list_imp(self, what=None, stream=sys.stdout):
    if what is None:
        aYaml.writeAsYaml(self, stream)
    list_to_do = list()
    if isinstance(what, str):
        list_to_do.append(what)
    elif isinstance(what, list):
        list_to_do.extend(what)
    whole_sections_to_write = list()
    individual_items_to_write = list()
    for item_to_do in list_to_do:
        if utils.guid_re.match(item_to_do):
            whole_sections_to_write.append({item_to_do: sorted(iids_from_guids(self.install_definitions_index, item_to_do))})
        elif item_to_do == "define":
            whole_sections_to_write.append(aYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions", explicit_start=True, sort_mappings=True))
        elif item_to_do == "index":
            whole_sections_to_write.append(aYaml.YamlDumpDocWrap(self.install_definitions_index, '!index', "Installation index", explicit_start=True, sort_mappings=True))
        elif item_to_do == "guid":
            guid_dict = dict()
            for lic in guid_list(self.install_definitions_index):
                guid_dict[lic] = sorted(iids_from_guids(self.install_definitions_index, lic))
            whole_sections_to_write.append(aYaml.YamlDumpDocWrap(guid_dict, '!guid', "guid to IID", explicit_start=True, sort_mappings=True))
        else:
            individual_items_to_write.append(item_to_do)
    aYaml.writeAsYaml(whole_sections_to_write + self.repr_for_yaml(individual_items_to_write), stream)
def do_list_imp(self, what=None, stream=sys.stdout):
    if what is None:
        aYaml.writeAsYaml(self, stream)
    list_to_do = list()
    if isinstance(what, str):
        list_to_do.append(what)
    elif isinstance(what, list):
        list_to_do.extend(what)
    whole_sections_to_write = list()
    individual_items_to_write = list()
    for item_to_do in list_to_do:
        if utils.guid_re.match(item_to_do):
            translated_iids, orphaned_guids = self.items_table.iids_from_guids([item_to_do])
            whole_sections_to_write.append({item_to_do: translated_iids})
        elif item_to_do == "define":
            whole_sections_to_write.append(aYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions", explicit_start=True, sort_mappings=True))
        elif item_to_do == "index":
            whole_sections_to_write.append(aYaml.YamlDumpDocWrap(self.items_table.repr_for_yaml(), '!index', "Installation index", explicit_start=True, sort_mappings=True))
        elif item_to_do == "guid":
            guid_dict = dict()
            all_guids = self.items_table.get_detail_values_by_name_for_all_iids("guid")
            for a_guid in all_guids:
                translated_iids, orphaned_guids = self.items_table.iids_from_guids([a_guid])
                guid_dict[a_guid] = translated_iids
            whole_sections_to_write.append(aYaml.YamlDumpDocWrap(guid_dict, '!guid', "guid to IID", explicit_start=True, sort_mappings=True))
        else:
            individual_items_to_write.append(item_to_do)
    aYaml.writeAsYaml(whole_sections_to_write + self.repr_for_yaml(individual_items_to_write), stream)
def do_list_imp(self, what=None, stream=sys.stdout): if what is None: aYaml.writeAsYaml(self, stream) list_to_do = list() if isinstance(what, str): list_to_do.append(what) elif isinstance(what, list): list_to_do.extend(what) whole_sections_to_write = list() individual_items_to_write = list() for item_to_do in list_to_do: if utils.guid_re.match(item_to_do): translated_iids, orphaned_guids = self.items_table.iids_from_guids([item_to_do]) whole_sections_to_write.append({item_to_do: translated_iids}) elif item_to_do == "define": whole_sections_to_write.append(aYaml.YamlDumpDocWrap(config_vars, '!define', "Definitions", explicit_start=True, sort_mappings=True)) elif item_to_do == "index": whole_sections_to_write.append(aYaml.YamlDumpDocWrap(self.items_table.repr_for_yaml(), '!index', "Installation index", explicit_start=True, sort_mappings=True)) elif item_to_do == "guid": guid_dict = dict() all_guids = self.items_table.get_detail_values_by_name_for_all_iids("guid") for a_guid in all_guids: translated_iids, orphaned_guids = self.items_table.iids_from_guids([a_guid]) guid_dict[a_guid] = translated_iids whole_sections_to_write.append(aYaml.YamlDumpDocWrap(guid_dict, '!guid', "guid to IID", explicit_start=True, sort_mappings=True)) else: individual_items_to_write.append(item_to_do) aYaml.writeAsYaml(whole_sections_to_write + self.repr_for_yaml(individual_items_to_write), stream)
def test_write(self): as_yaml = self.it.repr_for_yaml() as_yaml_doc = aYaml.YamlDumpDocWrap(as_yaml, '!index') as_yaml_doc.ReduceOneItemLists() with open(self.out_file_path, "w") as wfd: utils.chown_chmod_on_fd(wfd) aYaml.writeAsYaml(as_yaml_doc, wfd)
def write_history(self): selected_tab = self.notebook.tab(self.notebook.select(), option='text') var_stack.set_var("SELECTED_TAB").append(selected_tab) the_list_yaml_ready= var_stack.repr_for_yaml(which_vars=var_stack.ResolveVarToList("__GUI_CONFIG_FILE_VARS__", default=[]), include_comments=False, resolve=False, ignore_unknown_vars=True) the_doc_yaml_ready = aYaml.YamlDumpDocWrap(the_list_yaml_ready, '!define', "Definitions", explicit_start=True, sort_mappings=True) with open(var_stack.ResolveVarToStr("INSTL_GUI_CONFIG_FILE_NAME"), "w", encoding='utf-8') as wfd: utils.make_open_file_read_write_for_all(wfd) aYaml.writeAsYaml(the_doc_yaml_ready, wfd)
def write_history(self): selected_tab = self.notebook.tab(self.notebook.select(), option='text') config_vars["SELECTED_TAB"] = selected_tab which_vars_for_yaml = config_vars.get("__GUI_CONFIG_FILE_VARS__", []).list() the_list_yaml_ready= config_vars.repr_for_yaml(which_vars=which_vars_for_yaml, resolve=False, ignore_unknown_vars=True) the_doc_yaml_ready = aYaml.YamlDumpDocWrap(the_list_yaml_ready, '!define', "Definitions", explicit_start=True, sort_mappings=True) with utils.utf8_open_for_write(config_vars["INSTL_GUI_CONFIG_FILE_NAME"].str(), "w") as wfd: aYaml.writeAsYaml(the_doc_yaml_ready, wfd)
def __call__(self, *args, **kwargs) -> None: if "REPO_REV_FILE_VARS" not in config_vars: # must have a list of variable names to write to the repo-rev file raise ValueError("REPO_REV_FILE_VARS must be defined") repo_rev_vars = list(config_vars["REPO_REV_FILE_VARS"]) # list of configVars to write to the repo-rev file # check that the variable names from REPO_REV_FILE_VARS do not contain # names that must not be made public dangerous_intersection = set(repo_rev_vars).intersection( {"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "PRIVATE_KEY", "PRIVATE_KEY_FILE"}) if dangerous_intersection: log.warning("found", str(dangerous_intersection), "in REPO_REV_FILE_VARS, aborting") raise ValueError(f"file REPO_REV_FILE_VARS {dangerous_intersection} and so is forbidden to upload") use_zlib = bool(config_vars.get("USE_ZLIB", "False")) # should we consider zipped files or not zip_extension = "" if use_zlib: zip_extension = config_vars.get("WZLIB_EXTENSION", ".wzip").str() revision_instl_folder_path = Path(config_vars["UPLOAD_REVISION_INSTL_FOLDER"]) # create checksum for the main info_map file, either wzipped or not main_info_map_file_name = "info_map.txt"+zip_extension main_info_map_file = revision_instl_folder_path.joinpath(main_info_map_file_name) main_info_map_checksum = utils.get_file_checksum(main_info_map_file) config_vars["INFO_MAP_FILE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+main_info_map_file_name config_vars["INFO_MAP_CHECKSUM"] = main_info_map_checksum # create checksum for the main index.yaml file, either wzipped or not index_file_name = "index.yaml"+zip_extension index_file_path = revision_instl_folder_path.joinpath(index_file_name) config_vars["INDEX_CHECKSUM"] = utils.get_file_checksum(index_file_path) config_vars["INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+index_file_name short_index_file_name = "short-index.yaml" short_index_file_path = revision_instl_folder_path.joinpath(short_index_file_name) config_vars["SHORT_INDEX_CHECKSUM"] = utils.get_file_checksum(short_index_file_path) config_vars["SHORT_INDEX_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl/"+short_index_file_name config_vars["INSTL_FOLDER_BASE_URL"] = "$(BASE_LINKS_URL)/$(REPO_NAME)/$(__CURR_REPO_FOLDER_HIERARCHY__)/instl" config_vars["REPO_REV_FOLDER_HIERARCHY"] = "$(__CURR_REPO_FOLDER_HIERARCHY__)" # check that all variables are present # <class 'list'>: ['INSTL_FOLDER_BASE_URL', 'REPO_REV_FOLDER_HIERARCHY', 'SYNC_BASE_URL'] missing_vars = [var for var in repo_rev_vars if var not in config_vars] if missing_vars: raise ValueError(f"{missing_vars} are missing cannot write repo rev file") # create yaml out of the variables variables_as_yaml = config_vars.repr_for_yaml(repo_rev_vars) repo_rev_yaml_doc = aYaml.YamlDumpDocWrap(variables_as_yaml, '!define', "", explicit_start=True, sort_mappings=True) repo_rev_file_path = config_vars["UPLOAD_REVISION_REPO_REV_FILE"] with utils.utf8_open_for_write(repo_rev_file_path, "w") as wfd: aYaml.writeAsYaml(repo_rev_yaml_doc, out_stream=wfd, indentor=None, sort=True) log.info(f"""create {repo_rev_file_path}""")
def __call__(self, *args, **kwargs) -> None:
    short_index_data = self.items_table.get_data_for_short_index()  # iid, name, version_mac, version_win, install_guid, remove_guid
    short_index_dict = defaultdict(dict)
    builtin_iids = list(config_vars["SPECIAL_BUILD_IN_IIDS"])
    for data_line in short_index_data:
        data_dict = dict(data_line)
        IID = data_dict['iid']
        if IID not in builtin_iids:
            if data_dict['name']:
                short_index_dict[IID]['name'] = data_dict['name']
            if data_dict['version_mac'] == data_dict['version_win']:
                short_index_dict[IID]['version'] = data_dict['version_mac']
            else:
                if data_dict['version_mac']:
                    short_index_dict[IID]['Mac'] = {'version': data_dict['version_mac']}
                if data_dict['version_win']:
                    short_index_dict[IID]['Win'] = {'version': data_dict['version_win']}
            if data_dict['install_guid']:
                if data_dict['remove_guid'] != data_dict['install_guid']:  # found a separate uninstall guid
                    short_index_dict[IID]['guid'] = list((data_dict['install_guid'], data_dict['remove_guid']))
                else:
                    short_index_dict[IID]['guid'] = data_dict['install_guid']
    defines_dict = config_vars.repr_for_yaml(which_vars=list(config_vars['SHORT_INDEX_FILE_VARS']), resolve=True, ignore_unknown_vars=False)
    defines_yaml_doc = aYaml.YamlDumpDocWrap(defines_dict, '!define', "Definitions", explicit_start=True, sort_mappings=True)
    index_yaml_doc = aYaml.YamlDumpDocWrap(value=short_index_dict, tag="!index", explicit_start=True, explicit_end=False, sort_mappings=True, include_comments=False)
    with utils.utf8_open_for_write(self.short_index_yaml_path, "w") as wfd:
        aYaml.writeAsYaml(defines_yaml_doc, wfd)
        aYaml.writeAsYaml(index_yaml_doc, wfd)
def write_require_file(self, file_path, require_dict): with open(file_path, "w", encoding='utf-8') as wfd: utils.make_open_file_read_write_for_all(wfd) define_dict = aYaml.YamlDumpDocWrap({"REQUIRE_REPO_REV": var_stack.ResolveVarToStr("MAX_REPO_REV")}, '!define', "definitions", explicit_start=True, sort_mappings=True) require_dict = aYaml.YamlDumpDocWrap(require_dict, '!require', "requirements", explicit_start=True, sort_mappings=True) aYaml.writeAsYaml((define_dict, require_dict), wfd)
def do_listindex(self, params):
    if params:
        params = shlex.split(params)
        params_not_in_index = list()
        for param in params:
            if param in self.client_prog_inst.install_definitions_index:
                self.client_prog_inst.install_definitions_index[param].resolve_inheritance(self.client_prog_inst.install_definitions_index)
                aYaml.writeAsYaml({param: self.client_prog_inst.install_definitions_index[param].repr_for_yaml()})
            else:
                params_not_in_index.append(param)
        if params_not_in_index:
            print("Not found in index:\n ", "\n ".join(params_not_in_index))
def create_instl_history_file(self): var_stack.set_var("__BATCH_CREATE_TIME__").append(time.strftime("%Y/%m/%d %H:%M:%S")) yaml_of_defines = aYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions", explicit_start=True, sort_mappings=True) # write the history file, but only if variable LOCAL_REPO_BOOKKEEPING_DIR is defined # and the folder actually exists. instl_temp_history_file_path = var_stack.ResolveVarToStr("INSTL_HISTORY_TEMP_PATH") instl_temp_history_folder, instl_temp_history_file_name = os.path.split(instl_temp_history_file_path) if os.path.isdir(instl_temp_history_folder): with open(instl_temp_history_file_path, "w", encoding='utf-8') as wfd: utils.make_open_file_read_write_for_all(wfd) aYaml.writeAsYaml(yaml_of_defines, wfd) self.batch_accum += self.platform_helper.append_file_to_file("$(INSTL_HISTORY_TEMP_PATH)", "$(INSTL_HISTORY_PATH)")
def write_require_file(self, file_path, require_dict): with utils.utf8_open_for_write(file_path, "w") as wfd: define_dict = aYaml.YamlDumpDocWrap( {"REQUIRE_REPO_REV": config_vars["MAX_REPO_REV"].str()}, '!define', "definitions", explicit_start=True, sort_mappings=True) require_dict = aYaml.YamlDumpDocWrap(require_dict, '!require', "requirements", explicit_start=True, sort_mappings=True) aYaml.writeAsYaml((define_dict, require_dict), wfd)
def test_readFile(self): input_file_path = Path(__file__).parent.joinpath("test_input.yaml") out_file_path = Path(__file__).parent.joinpath("test_out.yaml") expected_file_path = Path(__file__).parent.joinpath( "expected_output.yaml") reader = ConfigVarYamlReader() reader.read_yaml_file(input_file_path) variables_as_yaml = config_vars.repr_for_yaml() yaml_doc = aYaml.YamlDumpDocWrap(variables_as_yaml, '!define', "", explicit_start=True, sort_mappings=True) with open(out_file_path, "w") as wfd: aYaml.writeAsYaml(yaml_doc, wfd) out_lines = normalize_yaml_lines(out_file_path) expected_lines = normalize_yaml_lines(expected_file_path) self.assertEqual(out_lines, expected_lines)
def command_output(self): if not bool(config_vars.get('__SILENT__', "false")): output_format = str(config_vars.get("OUTPUT_FORMAT", 'text')) if output_format == "json": output_text = json.dumps(self.output_data, indent=1, default=utils.extra_json_serializer) elif output_format == "yaml": io_str = io.StringIO() for yaml_data in self.output_data: aYaml.writeAsYaml(yaml_data, io_str) output_text = io_str.getvalue() else: # output_format == "text": text is the default format lines = [ ", ".join(line_data) for line_data in self.output_data ] output_text = "\n".join(lines) out_file = config_vars.get("__MAIN_OUT_FILE__", None).Path() with utils.write_to_file_or_stdout(out_file) as wfd: wfd.write(output_text) wfd.write("\n")
def __command_output(self, _as_yaml_doc): out_file_path = config_vars.get("__MAIN_OUT_FILE__", None).Path() with utils.write_to_file_or_stdout(out_file_path) as wfd: aYaml.writeAsYaml(defines_yaml_doc, wfd) aYaml.writeAsYaml(index_yaml_doc, wfd)
def test_write(self): as_yaml = self.it.repr_for_yaml() as_yaml_doc = aYaml.YamlDumpDocWrap(as_yaml, '!index') as_yaml_doc.ReduceOneItemLists() with open(self.out_file_path, "w") as wfd: aYaml.writeAsYaml(as_yaml_doc, wfd)
def write_program_state(self): state_file = var_stack.ResolveVarToStr("__MAIN_STATE_FILE__") with utils.write_to_file_or_stdout(state_file) as fd: aYaml.writeAsYaml(self, fd)
def read_include_node(self, i_node, *args, **kwargs):
    pass  # override to handle __include__, __include_if_exist__ nodes

# regex to find conditionals e.g. __ifndef__(S3_BUCKET_NAME)
conditional_re = re.compile(r"""__if(?P<if_type>.*)__\s*\((?P<condition>.+)\)""")

def read_conditional_node(self, identifier, contents, *args, **kwargs):
    match = self.conditional_re.match(identifier)
    if match:
        condition = match.group('condition')
        if_type = match.group('if_type')
        if if_type == "def":  # __ifdef__: if configVar is defined
            if condition in var_stack:
                self.read_defines(contents)
        elif if_type == "ndef":  # __ifndef__: if configVar is not defined
            if condition not in var_stack:
                self.read_defines(contents)
        elif if_type == "":  # __if__: eval the condition
            resolved_condition = var_stack.ResolveStrToStr(condition)
            condition_result = eval(resolved_condition)
            if condition_result:
                self.read_defines(contents)
    else:
        print("unknown conditional {}".format(identifier))


if __name__ == "__main__":
    aReader = ConfigVarYamlReader()
    aReader.read_yaml_file("/p4client/dev_saa/ProAudio/XPlatform/Apps/SAA_Juce/audio plugin host/saa_post_build.yaml")
    aYaml.writeAsYaml(var_stack, sys.stdout, sort=True)
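# Minimal standalone check of the conditional identifier regex used by
# read_conditional_node above; the sample identifiers are hypothetical but
# follow the __ifdef__/__ifndef__/__if__ forms described in the comments.
import re

conditional_re = re.compile(r"""__if(?P<if_type>.*)__\s*\((?P<condition>.+)\)""")

for identifier in ("__ifdef__(S3_BUCKET_NAME)",
                   "__ifndef__(S3_BUCKET_NAME)",
                   "__if__($(REPO_REV) > 42)"):
    match = conditional_re.match(identifier)
    print(identifier, "->", repr(match.group('if_type')), repr(match.group('condition')))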