def find_in_repository(self, repository_path):
    """
    Searches for group.xml and patterns.xml in the package-groups-*.rpm
    package in the given repository (if it exists) and reads them in via
    find_in_directory.

    @param repository_path The path to the repository.
    """
    package_groups_package = None
    package_groups_packages = files.find_fast(repository_path,
                                              r"^package-groups.*\.rpm$")
    if len(package_groups_packages) > 1:
        logging.warning("Multiple package-groups RPMs found:")
        for package in package_groups_packages:
            logging.warning(" * {0}".format(package))
        package_groups_package = package_groups_packages[0]
        logging.warning("Selecting {0}".format(package_groups_package))
    elif len(package_groups_packages) == 1:
        package_groups_package = package_groups_packages[0]
    if package_groups_package is None:
        return None, None
    package_groups_package = os.path.abspath(package_groups_package)
    directory_unpacking = temporaries.create_temporary_directory("groups")
    # Unused local removed: the original stored os.getcwd() in
    # initial_directory but never used it.
    files.unrpm(package_groups_package, directory_unpacking)
    # NOTE(review): the early exit above returns a (None, None) pair while
    # this path implicitly returns None — callers that unpack the result
    # would fail here; confirm the intended contract before changing it.
    self.find_in_directory(directory_unpacking)
def prepend_preload_library(library_name, output_directory_path):
    """
    Prepends the given library to /etc/ld.so.preload inside the firmware.

    @param library_name          The name of the library to be preloaded.
    @param output_directory_path The directory with the firmware images.
    """
    candidates = files.find_fast(output_directory_path,
                                 "{0}.so.*".format(library_name))
    # Symlinks are skipped: only real library files are considered.
    real_candidates = [candidate for candidate in candidates
                       if not os.path.islink(candidate)]
    if len(real_candidates) > 1:
        logging.warning("Found several libraries {0}".format(library_name))
        for candidate in real_candidates:
            logging.warning(" * {0}".format(candidate))
    elif len(real_candidates) < 1:
        logging.error("Found no libraries {0}".format(library_name))
        sys.exit("Error.")
    selected_basename = os.path.basename(real_candidates[0])
    root = temporaries.mount_firmware(output_directory_path)
    ld_preload_path = os.path.join(root, "etc/ld.so.preload")
    # The new library goes first; existing entries for the same library
    # name are dropped, everything else is preserved.
    preload_lines = ["{0}\n".format(selected_basename)]
    if os.path.isfile(ld_preload_path):
        with open(ld_preload_path, "r") as ld_preload:
            preload_lines.extend(line for line in ld_preload
                                 if not line.startswith(library_name))
    with open(ld_preload_path, "w") as ld_preload:
        ld_preload.writelines(preload_lines)
def find_in_directory(self, directory_path):
    """
    Tries to find group.xml and patterns.xml files in the given directory
    and reads them in.

    @param directory_path The search directory.
    """
    def find_xml_files(stem):
        # Finds all <stem>.xml files, gunzipping any <stem>.xml.gz first.
        found = files.find_fast(directory_path,
                                r".*{0}\.xml$".format(stem))
        gzipped = files.find_fast(directory_path,
                                  r".*{0}\.xml\.gz$".format(stem))
        for path in gzipped:
            hidden_subprocess.silent_call(["gzip", "-d", "-k", "-f", path])
            # Strip only the trailing ".gz": the original
            # path.replace(".gz", "") would also mangle a ".gz"
            # occurring earlier in the path.
            found.append(path[:-len(".gz")])
        return found

    def select_single(paths, description):
        # Selects exactly one path, warning when several candidates exist.
        if not paths:
            return None
        if len(paths) > 1:
            logging.warning("Multiple {0} XML files "
                            "found:".format(description))
            for file_path in paths:
                logging.warning(" * {0}".format(file_path))
            logging.warning("Selecting {0}".format(paths[0]))
        return paths[0]

    groups = select_single(find_xml_files("group"), "groups")
    patterns = select_single(find_xml_files("patterns"), "patterns")
    if groups is not None:
        with open(groups, "r") as groups_file:
            self.groups_data = groups_file.readlines()
    if patterns is not None:
        with open(patterns, "r") as patterns_file:
            self.patterns_data = patterns_file.readlines()
def __find_qemu_executable(self):
    """
    Looks for the appropriate qemu executable for the given architecture
    in the patching root.

    @return The chroot-relative absolute path ("/...") of the selected
            working qemu executable.
    """
    # The synonyms for the architecture:
    arches = self.__produce_architecture_synonyms_list(self.architecture)
    executables = []
    for arch in arches:
        qemu_name = "^qemu-{0}$".format(arch)
        qemu_binfmt_name = "^qemu-{0}-binfmt$".format(arch)
        # The binfmt variant is searched first, so it is preferred below.
        executables.extend(files.find_fast(self.patching_root,
                                           qemu_binfmt_name))
        executables.extend(files.find_fast(self.patching_root, qemu_name))
    # Only warn when there really are several candidates; the original
    # code logged this unconditionally, even for a single executable.
    if len(executables) > 1:
        logging.warning("Found several qemu executables:")
    working_executables = []
    for path in executables:
        # Binaries from the bootstrap environment are skipped — presumably
        # not usable inside the chroot; TODO confirm.
        if "bootstrap" in path:
            continue
        relative_path = os.path.relpath(path, self.patching_root)
        if check.command_exists(path):
            working_executables.append(path)
            summary = "working"  # typo fix: was "workinig"
        else:
            summary = "not working"
        logging.warning(" * /{0} ({1})".format(relative_path, summary))
    if len(working_executables) < 1:
        logging.error("No working qemu executables found!")
        sys.exit("Error.")
    selected_path = working_executables[0]
    relative_path = os.path.relpath(selected_path, self.patching_root)
    logging.warning("The following one was selected: "
                    "/{0}".format(relative_path))
    return "/{0}".format(relative_path)
def __init__(self, cache_directory, name_checking_function):
    """
    Initializes the repository downloader.

    @param cache_directory        The combirepo cache directory.
    @param name_checking_function The function that checks URLs to be
                                  downloaded.
    """
    global update_repositories
    check.directory_exists(cache_directory)
    self._cache_directory = cache_directory
    self._repositories = []
    self._name_checking_function = name_checking_function
    config_paths = files.find_fast(self._cache_directory,
                                   ".repository.conf")
    for config_path in config_paths:
        # SafeConfigParser was deprecated since Python 3.2 and removed
        # in 3.12; ConfigParser is its direct replacement.
        parser = configparser.ConfigParser()
        parser.read(config_path)
        if not parser.has_section("repository"):
            logging.error("Repository config {0} does not contain "
                          "[repository] section!".format(config_path))
            continue
        if not parser.has_option("repository", "url"):
            logging.error("Repository config {0} does not contain "
                          "option \"url\" in section "
                          "[repository]!".format(config_path))
            continue
        url = parser.get("repository", "url")
        if not parser.has_option("repository", "status"):
            logging.error("Repository config {0} does not contain "
                          "option \"status\" in section "
                          "[repository]!".format(config_path))
            status = "unknown"
        else:
            status = parser.get("repository", "status")
        repository = {"url": url,
                      "path": os.path.dirname(config_path),
                      "status": status}
        # Repositories scheduled for update are dropped from disk and
        # not registered, so they will be downloaded again.
        if (update_repositories is not None and
                (url in update_repositories or
                 "all" in update_repositories)):
            shutil.rmtree(os.path.dirname(config_path))
            logging.info("Repository for URL {0} will be "
                         "updated!".format(url))
        else:
            self._repositories.append(repository)
    for repository in self._repositories:
        logging.debug("Found repository: {0}".format(repository))
def __find_platform_images(images_directory):
    """
    Finds the platform images in the directory.

    @param images_directory The directory with images.
    @return The paths of the found *.img files.
    """
    logging.debug("Searching in directory "
                  "{0}".format(images_directory))
    if not os.path.isdir(images_directory):
        raise Exception("{0} is not a "
                        "directory!".format(images_directory))
    return files.find_fast(images_directory, ".*\.img$")
def __preprocess_cache(self):
    """
    Preprocesses the patching RPMs cache: drops it when requested;
    otherwise reuses already patched RPMs from it and leaves only the
    not-yet-cached tasks in self._tasks.
    """
    global drop_patching_cache
    global patching_cache_path
    if drop_patching_cache:
        hidden_subprocess.call("Drop patching cache",
                               ["sudo", "rm", "-rf", patching_cache_path])
        os.makedirs(patching_cache_path)
        return
    # Map the info line of each cached RPM to its path.
    ready_rpms = files.find_fast(patching_cache_path, ".*\.rpm")
    info_items = {}
    for rpm in ready_rpms:
        info_path = "{0}.info.txt".format(rpm)
        if not os.path.isfile(info_path):
            continue
        with open(info_path, "r") as info_file:
            lines = info_file.readlines()
        # Guard against an empty info file; the original code crashed
        # here with IndexError.
        if not lines:
            continue
        info_items[lines[0]] = rpm
    for info_item in info_items.keys():
        logging.info("Found item {0} at location "
                     "{1}".format(info_item, info_items[info_item]))
    copy_tasks = []
    tasks_undone = []
    for task in self._tasks:
        name, path, destination, release, updates = task
        info = "{0}".format((name, path, release, updates))
        logging.info("Searching for {0}".format(info))
        # Dictionary lookup replaces the original linear scan over keys.
        cached_package_path = info_items.get(info)
        if cached_package_path is not None:
            logging.info("Found already patched RPM at "
                         "{0}".format(cached_package_path))
            copy_tasks.append((name, cached_package_path, destination))
        else:
            tasks_undone.append(task)
    self._tasks = tasks_undone
    if len(copy_tasks) > 0:
        hidden_subprocess.function_call_list("Copying from cache",
                                             shutil.copy, copy_tasks)
def _get_results(self):
    """
    Collects patched RPMs from the rpmrebuild results directories of all
    patching root clones.

    @return A list of (package name, real path, mtime) tuples, sorted by
            modification time (oldest first).
    """
    results = []
    for root in self.patching_root_clones:
        results_path = os.path.join(root, "rpmrebuild_results")
        if not os.path.isdir(results_path):
            continue
        for path in files.find_fast(results_path, ".*\.rpm"):
            # .get() avoids a KeyError for unexpected RPM basenames; the
            # original indexed the dict and then tested for None, which
            # suggests missing names were meant to be skipped.
            name = self._package_names.get(os.path.basename(path))
            if name is None:
                continue
            result_path = os.path.realpath(path)
            modification_time = os.path.getmtime(result_path)
            results.append((name, result_path, modification_time))
    results.sort(key=lambda tup: tup[2])
    return results
def __prepare_image(self, graphs):
    """
    Prepares the image needed for the RPM patcher.

    Sets self.images_directory to a directory containing *.img files,
    building a preliminary image when none exists yet.

    @param graphs The list of dependency graphs of repositories.
    """
    global developer_original_image
    global developer_outdir_original
    if developer_outdir_original is None:
        developer_outdir_original = (
            temporaries.create_temporary_directory("preliminary-image"))
    self.images_directory = developer_outdir_original
    if not os.path.isdir(developer_outdir_original):
        os.makedirs(developer_outdir_original)
    images = files.find_fast(self.images_directory, ".*\.img$")
    if images:
        # The preliminary images already exist, nothing to build.
        return
    if developer_original_image is None:
        # developer_outdir_original is guaranteed non-None above, so the
        # original's re-check for None here was dead code and is removed.
        original_images_dir = developer_outdir_original
        # Build a minimal image from a modified kickstart file with all
        # groups commented out.
        path = temporaries.create_temporary_file("mod.ks")
        shutil.copy(self.kickstart_file_path, path)
        kickstart_file = KickstartFile(path)
        kickstart_file.comment_all_groups()
        logging.debug("Repositories: {0}".format(self.repositories))
        packages = prepare_minimal_packages_list(graphs)
        repository_combiner.create_image(self.architecture, self.names,
                                         self.repositories, path,
                                         ["--outdir",
                                          original_images_dir],
                                         packages)
    else:
        if os.path.isdir(developer_original_image):
            original_images_dir = developer_original_image
        elif os.path.isfile(developer_original_image):
            original_images_dir = os.path.dirname(
                developer_original_image)
        else:
            logging.error("Given {0} is not a file or a "
                          "directory.".format(developer_original_image))
            sys.exit("Error.")
    self.images_directory = original_images_dir
def __prepare(self): """ Prepares the patching root ready for RPM patching. """ global developer_disable_patching if developer_disable_patching: logging.debug("RPM patcher will not be prepared.") return graphs = self._graphs self.__prepare_image(graphs) self.patching_root = temporaries.mount_firmware(self.images_directory) host_arch = platform.machine() host_arches = self.__produce_architecture_synonyms_list(host_arch) if self.architecture not in host_arches: self.__deploy_qemu_package() combirepo_dir = os.path.abspath(os.path.dirname(__file__)) rpmrebuild_file = os.path.join(combirepo_dir, 'data/rpmrebuild.tar') already_present_rpmrebuilds = files.find_fast(self.patching_root, "rpmrebuild.*") for already_present_rpmrebuild in already_present_rpmrebuilds: if os.path.isdir(already_present_rpmrebuild): shutil.rmtree(already_present_rpmrebuild) elif os.path.isfile(already_present_rpmrebuild): os.remove(already_present_rpmrebuild) hidden_subprocess.call( "Extracting the rpmrebuild ", ["tar", "xf", rpmrebuild_file, "-C", self.patching_root]) queue = multiprocessing.Queue() child = multiprocessing.Process(target=self.__install_rpmrebuild, args=(queue, )) child.start() child.join() if queue.empty(): logging.error("Failed to install rpmrebuild into chroot.") sys.exit("Error.") else: result = queue.get() if result: logging.debug("Installation of rpmrebuild successfully " "completed.") else: raise Exception("Impossible happened.")
def __use_cached_root_or_prepare(self):
    """
    Tries to find a cached prepared root and uses it in case it exists,
    and saves the freshly prepared one to the cache otherwise.
    """
    image_info = "{0}".format(
        (self.names, self.repositories, self.architecture,
         os.path.basename(self.kickstart_file_path)))
    cached_images_info_paths = files.find_fast(
        patching_cache_path, ".*preliminary_image.info.txt")
    matching_images_path = None
    self.__prepare()
    for info_path in cached_images_info_paths:
        cached_images_path = info_path.replace(".info.txt", "")
        if not os.path.isdir(cached_images_path):
            logging.error("Directory {0} not "
                          "found!".format(cached_images_path))
            continue
        with open(info_path, "r") as info_file:
            lines = info_file.readlines()
        # Guard: skip empty info files instead of crashing on lines[0].
        if lines and lines[0] == image_info:
            matching_images_path = cached_images_path
            break
    if matching_images_path is not None:
        self.patching_root = matching_images_path
        logging.info("Found already prepared patching root: "
                     "{0}".format(matching_images_path))
    else:
        cached_chroot_path = os.path.join(
            patching_cache_path,
            os.path.basename(self.patching_root) + "preliminary_image")
        hidden_subprocess.call("Saving chroot to cache", [
            "sudo", "cp", "-Z", "-P", "-a", self.patching_root,
            cached_chroot_path])
        info_path = cached_chroot_path + ".info.txt"
        # "w", not "wb": image_info is a str, and writing it to a
        # binary-mode file raises TypeError on Python 3.
        with open(info_path, "w") as info_file:
            info_file.write(image_info)
def __unpack_qemu_packages(self):
    """
    Looks for all qemu packages in the given list of repositories and
    unpacks them to the patching root.
    """
    qemu_packages = []
    qemu_package = self.qemu_path
    if qemu_package is None:
        expression = "^qemu.*\.{0}\.rpm$".format(self.architecture)
        for repository in self.repositories:
            qemu_packages.extend(files.find_fast(repository, expression))
        logging.warning("The following qemu packages will be unpacked in "
                        "chroot:")
        for package in qemu_packages:
            logging.warning(" * {0}".format(package))
    else:
        qemu_packages.append(qemu_package)
    # Unused local removed: the original stored os.getcwd() in
    # initial_directory but never used it.
    for package in qemu_packages:
        files.unrpm(package, self.patching_root)
def get_kickstart_from_repos(repository_pairs, kickstart_substring):
    """
    Gets kickstart files from repositories that are used during the build.

    @param repository_pairs    The repository pairs used during the image
                               building.
    @param kickstart_substring The substring that specifies the substring
                               of kickstart file name to be used.
    @return The path to the single resulting kickstart file (exits the
            process when none or several match).
    """
    if kickstart_substring is None:
        kickstart_substring = ""
    image_configurations_rpms = {}
    for repository_pair in repository_pairs:
        path = repository_pair.url
        rpms = files.find_fast(path, "image-configurations-.*\.rpm")
        image_configurations_rpms[repository_pair.name] = rpms
    logging.debug("Found following image-configurations RPMs: "
                  "{0}".format(image_configurations_rpms))
    kickstart_file_paths = {}
    for key in image_configurations_rpms.keys():
        for rpm in image_configurations_rpms[key]:
            directory_path = temporaries.create_temporary_directory(
                "unpack")
            files.unrpm(rpm, directory_path)
            # Accumulate kickstarts from all RPMs of the repository; the
            # original re-assigned the list for every RPM and so kept
            # only the last RPM's kickstart files. The pattern is also
            # anchored (".*\.ks$") — the original ".*.ks" matched any
            # name merely containing "ks".
            kickstart_file_paths.setdefault(key, []).extend(
                files.find_fast(directory_path, ".*\.ks$"))
    logging.info("Found following kickstart files:")
    all_kickstart_file_paths = []
    for key in kickstart_file_paths.keys():
        logging.info(" * in repository {0}:".format(key))
        for kickstart_file_path in kickstart_file_paths[key]:
            basename = os.path.basename(kickstart_file_path)
            all_kickstart_file_paths.append(kickstart_file_path)
            logging.info(" * {0}".format(basename))
        if len(kickstart_file_paths[key]) == 0:
            logging.info(" <no kickstart files in this repository>")
    logging.debug("Found files: {0}".format(all_kickstart_file_paths))
    helper_string = "use option -k for that or \"kickstart = ...\" in config"
    kickstart_file_path_resulting = None
    if len(all_kickstart_file_paths) > 1:
        matching_kickstart_file_paths = [
            kickstart_file_path
            for kickstart_file_path in all_kickstart_file_paths
            if kickstart_substring in os.path.basename(
                kickstart_file_path)]
        if len(matching_kickstart_file_paths) == 1:
            kickstart_file_path_resulting = (
                matching_kickstart_file_paths[0])
        else:
            # Both "several matches" and "no matches" are errors; the
            # original crashed with IndexError when nothing matched.
            logging.error("More than one kickstart files satisfy the "
                          "substring, or no substring was specified!")
            for kickstart_file_path in matching_kickstart_file_paths:
                basename = os.path.basename(kickstart_file_path)
                logging.error(" * {0}".format(basename))
            logging.error("Please, specify the unique name of kickstart "
                          "file or the unique substring! "
                          "({0}).".format(helper_string))
            sys.exit("Error.")
    elif len(all_kickstart_file_paths) == 1:
        kickstart_file_path_resulting = all_kickstart_file_paths[0]
    else:
        logging.error("No kickstart files found in repositories, please "
                      "specify the path to kickstart file manually! "
                      "({0}).".format(helper_string))
        sys.exit("Error.")
    return kickstart_file_path_resulting