def __init__(self, base_info, platform: Enum):
    """Per-platform info holder.

    Merges command-line-argument macros with platform macros (platform
    macros win on key collisions) and initializes the containers that
    base-file parsing fills in later.

    :param base_info: the shared BaseInfo object this platform pass belongs to
    :param platform: platform enum value this pass targets
    """
    self.shared = base_info
    self.platform = platform
    # platform macros override arg macros on duplicate keys (later ** wins)
    self.macros = {**get_arg_macros(), **get_platform_macros(platform)}
    verbose("")
    # plain loop, not a comprehension: this exists only for the logging side effect
    for name, value in self.macros.items():
        verbose_color(Color.DGREEN, 'Set Macro: {0} = "{1}"'.format(name, value))
    self._projects_all = []
    # this stores all everything in dependency_paths in a base file
    # and also has path fixes on it if used with a include with a path to change to
    self.dependency_dict = {}
    # if any path was modified above, then it's also added here with the original path
    self.dependency_dict_original = {}
    # for generators and parts of qpc to use:
    self.configurations = []
    self.projects = []
    self.project_folders = {}
    self.project_dependencies = {}
def _project_check_file_hash(project_dir: str, hash_list: list, project_path: str) -> bool:
    """Compare every stored file hash in ``hash_list`` against the file's current hash.

    Deliberately keeps scanning after the first mismatch so that out-of-date
    generator scripts can be recorded in CHECKED_HASHES (either as a generator
    to re-run or as a full rebuild) instead of bailing early.

    :return: True only when every stored hash still matches.
    """
    all_match = True
    for hash_block in hash_list:
        stored_path = hash_block.values[0]
        # absolute paths (or a missing project dir) are normalized as-is;
        # relative ones are resolved against the project directory
        if os.path.isabs(stored_path) or not project_dir:
            checked_path = posix_path(os.path.normpath(stored_path))
        else:
            checked_path = posix_path(os.path.normpath(project_dir + "/" + stored_path))

        if hash_block.key == make_hash(checked_path):
            continue

        # a changed generator script only forces that generator to re-run,
        # unless it isn't one of the requested generators
        if not CHECKED_HASHES[project_path]["rebuild_all"] and stored_path in QPC_GENERATOR_HASHES:
            generator_name = os.path.splitext(os.path.basename(stored_path))[0]
            if generator_name in args.generators:
                CHECKED_HASHES[project_path]["generators"].append(generator_name)
            else:
                CHECKED_HASHES[project_path]["rebuild_all"] = True

        verbose("File Modified: " + stored_path)
        all_match = False
    return all_match
def _parse_project(self, project_file: QPCBlockBase, project: ProjectPass, file_path: str, indent: str = "") -> None:
    """Walk a parsed project script block tree and apply it to a ProjectPass.

    Recurses into "include" blocks; ``indent`` is only used to indent verbose
    log output at each include depth.

    :param project_file: parsed block tree of the script being applied
    :param project: the per-config/platform/arch pass being populated
    :param file_path: path of the script, used for SCRIPT_NAME/SCRIPT_DIR macros
    :param indent: log-output indentation for nested includes
    """
    file_dir, file_name = os.path.split(file_path)

    def set_script_macros():
        # SCRIPT_NAME/SCRIPT_DIR always refer to the script currently being parsed
        project.add_macro(indent, "SCRIPT_NAME", file_name)
        project.add_macro(indent, "SCRIPT_DIR", file_dir)

    set_script_macros()
    for project_block in project_file:
        # skip any block whose condition fails under the current macros
        if project_block.solve_condition(project.macros):
            if project_block.key == "macro":
                project.add_macro(indent, *project.replace_macros_list(*project_block.values))
            elif project_block.key == "configuration":
                self._parse_config(project_block, project)
            elif project_block.key == "files":
                self._parse_files(project_block, project, [])
            elif project_block.key == "dependencies":
                # "-" removes dependencies; anything else adds them
                for block in project_block.get_items_cond(project.macros):
                    if block.key == "-":
                        project.remove_dependencies(*block.values)
                    else:
                        project.add_dependencies(block.key, *block.values)
            elif project_block.key == "build_event":
                self._parse_build_event(project_block, project)
            elif project_block.key == "include":
                # Ah shit, here we go again.
                include_path = project.replace_macros(project_block.values[0])
                include_file = self._include_file(include_path, project, indent + " ")
                if include_file:
                    try:
                        self._parse_project(include_file, project, include_path, indent + " ")
                        # reset the script macros back to the values for this script
                        set_script_macros()
                    except RecursionError:
                        # a script including itself (directly or indirectly) blows the stack;
                        # re-raise with the offending block's location for the user
                        raise RecursionError("Recursive Includes found:\n" + project_block.get_formatted_info())
                    verbose(indent + " " + "Finished Parsing")
                else:
                    project_block.warning(f"File does not exist: {include_path}")
            else:
                project_block.warning("Unknown key: ")
def _include_file(self, include_path: str, project: ProjectPass, indent: str) -> QPCBlockBase:
    """Record the hash of an included script and read it.

    :return: the parsed block tree, or None when the file could not be read.
    """
    # always record the hash, even if the read fails, so a later
    # appearance of the file invalidates the cached hash state
    project.hash_list[include_path] = qpc_hash.make_hash(include_path)
    parsed = self.read_file(include_path)
    if parsed:
        verbose(indent + "Parsing: " + include_path)
        return parsed
    return None
def _check_file_hash(project_dir: str, hash_list: list) -> bool:
    """Return True when every stored hash matches its file; stops at the first mismatch."""
    for hash_block in hash_list:
        stored_path = hash_block.values[0]
        # resolve relative paths against the project directory
        if not project_dir or os.path.isabs(stored_path):
            full_path = posix_path(os.path.normpath(stored_path))
        else:
            full_path = posix_path(os.path.normpath(project_dir + "/" + stored_path))
        if make_hash(full_path) != hash_block.key:
            verbose("File Modified: " + stored_path)
            return False
    return True
def _check_glob_files(project_dir: str, file_list: list) -> bool:
    """Re-run each stored glob pattern and compare the hash of the sorted match list.

    :return: True when every glob still resolves to the same set of files.
    """
    for file_block in file_list:
        stored_hash = file_block.key
        pattern = file_block.values[0]
        # normalize to posix separators and sort so the hash is order-independent
        matches = sorted(posix_path(p) for p in glob.glob(project_dir + "/" + pattern))
        if stored_hash != hash_from_string(' '.join(matches)):
            verbose("Files found are different: " + pattern)
            return False
    return True
def check_master_file_hash(project_path: str, base_info, generator, hash_list: dict) -> bool:
    """Validate the hash file of a master file.

    Checks the stored command line, per-file hashes, and per-project file list;
    all three block types must be present and pass for the hash to be valid.

    :return: True when the master file does not need regenerating.
    """
    hash_file_path = get_hash_file_path(project_path)
    project_dir = os.path.split(project_path)[0]
    required_blocks = sorted(("commands", "hashes", "files"))
    blocks_seen = []

    if not os.path.isfile(hash_file_path):
        verbose("Hash File does not exist")
        return False

    hash_file = qpc_reader.read_file(hash_file_path)
    if not hash_file:
        return False

    for block in hash_file:
        if block.key == "commands":
            blocks_seen.append(block.key)
            if not _check_commands(project_dir, block.items, 5):
                return False
        elif block.key == "hashes":
            blocks_seen.append(block.key)
            if not _check_file_hash(project_dir, block.items):
                return False
        elif block.key == "files":
            blocks_seen.append(block.key)
            if not base_info.project_hashes:
                continue
            # folder layout only matters for generators that use folders
            if generator.uses_folders():
                if not _check_files(project_dir, block.items, hash_list, base_info.projects):
                    return False
            elif not _check_files(project_dir, block.items, hash_list):
                return False
        else:
            # how would this happen
            block.warning("Unknown Key in Hash: ")

    if required_blocks == sorted(blocks_seen):
        print("Valid: " + project_path + get_hash_file_ext(project_path))
        return True
    return False
def parse_base_info(self, base_file_path: str) -> BaseInfo:
    """Read and parse the root base file into a BaseInfo.

    A missing or unreadable file produces a warning and an empty (but
    finished) BaseInfo instead of raising.

    :param base_file_path: path of the base script to parse; falsy skips parsing
    :return: the populated BaseInfo (finish_parsing is always called)
    """
    info = BaseInfo()
    if base_file_path:
        # BUG FIX: log the path we were actually given instead of
        # args.base_file, so callers passing a different script see
        # the correct file name in verbose output
        verbose("\nReading: " + base_file_path)
        base_file = self.read_file(base_file_path)
        if not base_file:
            warning("Base File does not exist: " + base_file_path)
        else:
            verbose("\nParsing: " + base_file_path)
            # one parse per platform pass; plain loop — done for side effects only
            for info_plat in info.info_list:
                self._parse_base_info_recurse(info_plat, base_file)
    info.finish_parsing()
    return info
def _check_files(project_dir, hash_file_list, file_list, project_def_list: dict = None) -> bool:
    """Compare stored per-project entries against the current project state.

    Each entry records a hash path, an optional display folder, and an optional
    dependency-list hash; any difference (added/removed projects, moved folders,
    changed dependencies) invalidates the hash file.

    :return: True when nothing relevant has changed.
    """
    # a different project count means projects were added or removed
    if len(hash_file_list) != len(file_list):
        return False

    for file_block in hash_file_list:
        hash_path = file_block.get_item_values("hash_path")[0]

        folder_values = file_block.get_item_values("folder")
        folder = folder_values[0] if folder_values else ""

        dep_hash_values = file_block.get_item_values("dependency_hash")
        dependency_hash = dep_hash_values[0] if dep_hash_values else ""

        if os.path.isabs(hash_path) or not project_dir:
            hash_path = posix_path(os.path.normpath(hash_path))
        else:
            hash_path = posix_path(os.path.normpath(project_dir + "/" + hash_path))

        if hash_path not in file_list.values():
            verbose("New project added: " + file_block.key)
            return False

        # folder moved in the project tree?
        if folder and project_def_list:
            for project_def in project_def_list:
                if file_block.key == project_def.path:
                    if folder != "/".join(project_def_list[project_def]):
                        return False
                    break

        # Now check dependencies
        project_dep_list = get_project_dependencies(file_block.key)
        if not project_dep_list:
            if dependency_hash:
                # all dependencies were removed from it, and we think it has some still, rebuild
                verbose("Outdated dependency list: " + file_block.key)
                return False
            continue
        if not dependency_hash:
            # project has dependencies now, and we think it doesn't, rebuild
            return False

        project_dep_list.sort()
        if dependency_hash != hash_from_string(' '.join(project_dep_list)):
            verbose(f"Dependencies Changed: \"{file_block.key}\"")
            return False
    return True
def parse_project(self, project_def: ProjectDefinition, project_script: str, info: BaseInfo, generator_list: list) -> ProjectContainer:
    """Parse one project script across all of its config/platform/arch passes.

    :param project_def: project definition this script belongs to
    :param project_script: path of the script (used for logging and as the pass file path)
    :param info: base info shared by all projects
    :param generator_list: generators the container is being built for
    :return: the populated ProjectContainer, or None when the script is missing
    """
    if args.time:
        start_time = perf_counter()
    elif not args.verbose:
        print("Parsing: " + project_script)
    project_filename = os.path.split(project_script)[1]
    # NOTE(review): reads by bare filename, not the full script path —
    # presumably the caller has already chdir'd into the project dir; confirm
    project_block = self.read_file(project_filename)
    if project_block is None:
        warning("Script does not exist: " + project_script)
        return
    project_name = os.path.splitext(project_filename)[0]
    project_container = ProjectContainer(project_name, project_script, info, project_def, generator_list)
    # the same block tree is re-applied once per pass (config/platform/arch combo)
    for project_pass in project_container._passes:
        verbose(f"\n ---- Parsing Project - "
                f"Config: \"{project_pass.config_name}\" "
                f"Platform: \"{project_pass.platform.name}\" "
                f"Arch: \"{project_pass.arch.name}\" ---- \n")
        verbose("Parsing: " + project_script)
        project_pass.hash_list[project_filename] = qpc_hash.make_hash(project_filename)
        self._parse_project(project_block, project_pass, project_script)
        self.counter += 1
        # every pass must end up with a configuration_type, or generators can't run
        if project_pass.config.general.configuration_type is None:
            error("No configuration_type Specified in Script!",
                  "Pick one of these and add it to the \"general\" group:",
                  " ".join([f"\"{enum.name.lower()}\"" for enum in ConfigType]))
    verbose("Parsed: " + project_container.get_display_name())
    if args.time:
        # start_time is only bound when args.time was set above
        print(str(round(perf_counter() - start_time, 4)) + " - Parsed: " + project_script)
    return project_container
def check_hash(project_path: str, print_allowed: bool = True) -> bool:
    """Validate a project's hash file, memoizing the verdict in CHECKED_HASHES.

    Checks the stored command line, per-file hashes, and glob results; all
    three block types must be present and pass. The verdict (plus which
    generators need re-running and whether a full rebuild is needed) is cached
    per project path.

    :param project_path: path of the project script whose hash file to check
    :param print_allowed: suppress output when False (used for quiet re-checks)
    :return: True when the project does not need rebuilding.
    """
    if project_path in CHECKED_HASHES:
        return CHECKED_HASHES[project_path]["result"]

    project_hash_file_path = get_hash_file_path(project_path)
    project_dir = os.path.split(project_path)[0]
    total_blocks = sorted(("commands", "glob_files", "hashes"))
    blocks_found = []
    CHECKED_HASHES[project_path] = {"result": True, "generators": [], "rebuild_all": False}

    if not os.path.isfile(project_hash_file_path):
        if print_allowed:
            verbose("Hash File does not exist")
        CHECKED_HASHES[project_path]["result"] = False
        CHECKED_HASHES[project_path]["rebuild_all"] = True
        return False

    hash_file = qpc_reader.read_file(project_hash_file_path)
    if not hash_file:
        CHECKED_HASHES[project_path]["result"] = False
        CHECKED_HASHES[project_path]["rebuild_all"] = True
        return False

    for block in hash_file:
        if block.key == "commands":
            blocks_found.append(block.key)
            result = _check_commands(project_dir, block.items, 4)
            CHECKED_HASHES[project_path]["rebuild_all"] = not result
        elif block.key == "hashes":
            blocks_found.append(block.key)
            result = _project_check_file_hash(project_dir, block.items, project_path)
        elif block.key == "dependencies":
            continue
        elif block.key == "glob_files":
            blocks_found.append(block.key)
            result = _check_glob_files(project_dir, block.items)
            CHECKED_HASHES[project_path]["rebuild_all"] = not result
        else:
            # how would this happen
            if print_allowed:
                block.warning("Unknown Key in Hash: ")
            continue
        # BUG FIX: the old code tested `result` at the TOP of the next loop
        # iteration, so a failure in the LAST block of the hash file was never
        # seen and the project could be reported "Valid" while stale.
        # Check immediately after each block instead.
        if not result:
            CHECKED_HASHES[project_path]["result"] = False
            return False

    if total_blocks == sorted(blocks_found):
        if print_allowed:
            print("Valid: " + project_path + get_hash_file_ext(project_path))
        CHECKED_HASHES[project_path]["result"] = True
        return True
    CHECKED_HASHES[project_path]["result"] = False
    return False
def _parse_base_info_recurse(self, info: BaseInfoPlatform, base_file: QPCBlockBase, include_dir: str = "") -> None:
    """Walk a parsed base-file block tree and apply it to a BaseInfoPlatform.

    Recurses into "include" blocks; an include may carry a second value that
    shifts the working directory for the included file's relative paths.

    :param info: per-platform info being populated
    :param base_file: parsed block tree of the base file
    :param include_dir: directory offset accumulated from parent includes
    """
    for project_block in base_file:
        if not project_block.solve_condition(info.macros):
            continue
        if project_block.key == "macro":
            info.add_macro(project_block)
        elif project_block.key == "configurations":
            # plain loop, not a comprehension: appending is a side effect
            for config in project_block.get_item_list_condition(info.macros):
                if config not in info.configurations:
                    info.configurations.append(config)
        # obsolete
        elif project_block.key == "dependency_paths":
            project_block.warning("dependency_paths is obsolete, now uses project paths directly")
        elif not project_block.values:
            continue
        elif project_block.key == "project":
            self._base_project_define(project_block, info, include_dir)
        elif project_block.key == "group":
            self._base_group_define(project_block, info)
        elif project_block.key == "include":
            # "Ah shit, here we go again."
            file_path = os.path.normpath(replace_macros(project_block.values[0], info.macros))
            new_include_dir = include_dir
            if len(project_block.values) >= 2:
                new_include_dir += "/" + project_block.values[1] if include_dir else project_block.values[1]
                new_include_dir = replace_macros(new_include_dir, info.macros)
            current_dir = os.getcwd()
            # BUG FIX: the old restore was guarded by `len(values) >= 2` while the
            # chdir was guarded by isdir(), so a directory change could leak; it
            # also never restored if parsing raised. Track the change explicitly
            # and restore in a finally block.
            changed_dir = False
            if os.path.isdir(new_include_dir):
                os.chdir(new_include_dir)
                changed_dir = True
            verbose("Reading: " + file_path)
            try:
                include_file = read_file(file_path)
                verbose("Parsing... ")
                self._parse_base_info_recurse(info, include_file, new_include_dir)
            except FileNotFoundError:
                project_block.warning("File Does Not Exist: ")
            finally:
                if changed_dir:
                    os.chdir(current_dir)
        elif not args.hide_warnings:
            project_block.warning("Unknown Key: ")