def _compare_data(self, base_depend_dict, compare_depend_list):
    """
    Compare the dependency difference between the base database and
    other databases, and write it to a csv file

    :param base_depend_dict: base database dependency info
        (mapping of rpm name -> dependency list)
    :param compare_depend_list: other databases dependency info
        (list of such mappings, one per database)
    :return: None
    """
    if not compare_depend_list:
        LOGGER.warning(
            'There is only one database input, no comparison operation')
        return
    csv_file = os.path.join(self.out_path, 'compare.csv')
    # newline='' is required when handing a file to csv.writer; without it
    # the writer emits spurious blank rows on platforms that translate EOLs
    with open(csv_file, 'w', encoding='utf-8', newline='') as file:
        csv_writer = csv.writer(file)
        # header row: one column per database
        csv_writer.writerow(self.databases)
        for rpm, dependency in base_depend_dict.items():
            # column 0 is the base database; append the matching
            # dependency list from every other database ([] when missing)
            _all_dependency_list = [dependency]
            for other_database in compare_depend_list:
                _all_dependency_list.append(other_database.get(rpm, []))
            # union of dependencies seen in any database for this rpm
            _all_dependency_set = {
                info
                for single_db_info in _all_dependency_list
                for info in single_db_info
            }
            self._write_single_field(_all_dependency_list,
                                     _all_dependency_set, csv_writer)
def _import_error(self, error, record=True):
    """
    Handle an import failure: optionally record the error, then drop the
    partially created ES index.

    :param error: the error to report
    :param record: when True, log the error and remember the failed index
    """
    if record:
        # remember which index failed so callers can report it later
        LOGGER.error(error)
        self._fail.append(self.elastic_index)
    undeleted = self._delete_index()
    if undeleted:
        LOGGER.warning("Delete the failed ES database:%s ." % undeleted)
def __get_subpacks(self, pkg_name_lst, is_init=False):
    """get source packages's subpacks

    Args:
        pkg_name_lst ([list]): [source package names list]
        is_init: whether this is the initial lookup
            (log the source names that were not found)

    Returns:
        [set]: [source package's subpacks]
    """
    subpack_names = set()
    if not pkg_name_lst:
        return subpack_names
    found_sources = set()
    src_resp = self.query_pkg.get_src_info(
        pkg_name_lst, self.database, 1, len(pkg_name_lst))
    for pkg_dict in src_resp["data"]:
        for pkg_info in pkg_dict.values():
            found_sources.add(pkg_info.get("src_name"))
            subpack_names.update(pkg_info.get("subpacks", []))
    self._search_set.update(found_sources)
    if is_init:
        not_found_pkg = str(set(pkg_name_lst) - found_sources)
        self.log_msg = f"source packages {not_found_pkg} not found in {self.database}"
        LOGGER.warning(self.log_msg)
    return subpack_names
def be_depend(self):
    """
    get source(binary) rpm package(s) bedepend relation

    Breadth-first search: repeatedly query the reverse dependencies of the
    pending package set until no unseen packages are produced.
    """
    visited = set()
    collect_next = self.__get_update_data_method()
    pending = self.__init_to_serach_pkgs()
    first_round = True
    while pending:
        resp = self.provide.get_be_req(pending, self.database)
        if not resp:
            break
        discovered = set()
        for bedep_info in resp:
            visited.add(bedep_info.get("binary_name"))
            discovered.update(collect_next(bedep_info))
        if first_round and self.parameter.get("packtype") == "binary":
            # report the input binaries the first query did not resolve
            not_found_pkg = str(pending - visited)
            self.log_msg = f"binary packages {not_found_pkg} not found in {self.database}"
            LOGGER.warning(self.log_msg)
            first_round = False
        # only keep newly discovered packages for the next round
        pending = discovered - visited
def self_depend(self, pkg_name, pkgtype="binary", self_build=False, with_subpack=False):
    """
    Description: get source(binary) rpm package(s) self depend relation

    Args:
        pkg_name: the list of package names needed to be searched
        pkgtype: the type of query package (source/binary)
        self_build: whether to query self build
        with_subpack: whether to query subpackages
    Exception:
        AttributeError: the input value is invalid
    """
    if not isinstance(pkg_name, list):
        raise AttributeError("the input is invalid")
    install_query = InstallDepend(self.db_list, self)
    build_query = BuildDepend(self.db_list, self)
    self.subpack = with_subpack
    # seed the pending-search dict matching the input package type
    seed_dict = (self.search_subpack_dict if pkgtype == "source"
                 else self.search_install_dict)
    for pkg in pkg_name:
        if pkg:
            seed_dict.get("non_db").add(pkg)
    if pkgtype == "source":
        self.__query_subpack(pkg_list=pkg_name, is_init=True)
    # keep looping until none of the three search dictionaries
    # holds any pending package names
    first_pass = True
    while (self._check_search(self.search_install_dict)
           or self._check_search(self.search_build_dict)
           or self._check_search(self.search_subpack_dict)):
        while self._check_search(self.search_install_dict):
            install_query.install_depend([])
            if first_pass and pkgtype == "binary":
                # report the binaries the first install query did not resolve
                missing = str(set(pkg_name) - set(self.binary_dict.keys()))
                self.log_msg = f"binary name {missing} not found in all database"
                LOGGER.warning(self.log_msg)
                first_pass = False
        while self._check_search(self.search_build_dict):
            build_query.build_depend([], self_build=self_build)
        while with_subpack and self._check_search(
                self.search_subpack_dict):
            self.__query_subpack()
def _set_val(self, key):
    """
    Description: Gets the cached hash value and assigns it to the
        corresponding dependent instance

    Args:
        key: cached key
    """
    redis_conn = constant.REDIS_CONN
    depend = self._depend
    depend.source_dict = json.loads(redis_conn.hget(key, "source_dict"))
    depend.binary_dict = json.loads(redis_conn.hget(key, "binary_dict"))
    depend.log_msg = redis_conn.hget(key, "log_msg")
    if depend.log_msg:
        # surface the cached warning so the caller sees it again
        LOGGER.warning(depend.log_msg)
def _create_index(self, indexs):
    """
    Description: Initializes the relevant index

    Args:
        indexs: names of the indexes to create

    Returns:
        the index names that failed to create (falsy when all succeeded)
    """
    mapping_dir = os.path.join(os.path.dirname(__file__), "mappings")
    index_specs = []
    for index in indexs:
        index_specs.append({
            "file": os.path.join(mapping_dir, index + ".json"),
            "name": self._index(index),
        })
    fails = self._session.create_index(index_specs)
    if fails:
        LOGGER.warning(
            "Failed to create the %s index when initializing the %s database ."
            % (",".join(fails), self.elastic_index))
        # roll back the indexes that did get created so the database
        # is not left half-initialized
        created = {self._index(index) for index in indexs} - set(fails)
        self._session.delete_index(list(created))
    return fails
def _cache(self):
    """
    Description: Gets the dependency value in the cache or executes
        the method to get the dependency data
    """
    hashed = self._hash_key()
    if not hashed:
        return
    redis_key = "pkgship_" + hashed
    try:
        if constant.REDIS_CONN.exists(redis_key):
            # cache hit: restore the cached dependency data
            self._set_val(redis_key)
        else:
            # cache miss: compute the data and store it under the key
            self._set_cache(redis_key)
    except RedisError as error:
        # redis unavailable: log and fall back to a direct query
        LOGGER.warning(error)
        self._func(*self._args, **self._kwargs)
def __query_subpack(self, pkg_list=None, is_init=False):
    """
    Description: query the source package's subpack in database

    Args:
        pkg_list: source package names of the initial query
        is_init: whether this is the initial query
            (log the source names that were not found)
    """
    found_src_names = set()
    resp = self._query_in_db(search_dict=self.search_subpack_dict,
                             func=self.__query_pkg.get_bin_name)
    if not resp:
        LOGGER.warning("Cannot get any resp for the packages ")
    for pkg_info in resp:
        if not pkg_info:
            LOGGER.warning("There is a None type in resp for ")
            continue
        src_name = pkg_info.get("source_name")
        found_src_names.add(src_name)
        db_name = pkg_info.get("database")
        if not db_name or not src_name:
            continue
        # queue every not-yet-resolved binary for the install-depend search
        for bin_info in pkg_info.get("binary_infos"):
            bin_name = bin_info.get("bin_name")
            if bin_name and bin_name not in self.binary_dict:
                self.search_install_dict[db_name].add(bin_name)
    if is_init and isinstance(pkg_list, list):
        missing = str(set(pkg_list) - found_src_names)
        self.log_msg = f"source name {missing} not found in all database"
        LOGGER.warning(self.log_msg)
def __query_one_level_dep(self, level, self_build):
    """
    Description: query the one level build dep in database

    Args:
        level: The number of levels of dependency querying, the
            default value of level is 0, which means search all dependency
        self_build: whether to query self build
    Returns:
        resp: the response for one level depend result
    """
    resp = self._query_in_db(search_dict=self.search_build_dict,
                             func=self.__query_buildreq.get_build_req)
    # for build depend search, the first loop of search set is source packages,
    # but the second loop of the search set is binary packages,
    # so cannot use that set to remove duplicates
    if self.__level == 1:
        # snapshot of the input packages; names get discarded as they are
        # found, so what remains afterwards is the "not found" set
        searched_pkg = copy.deepcopy(self._search_set)
    if not self_build:
        self._search_set.clear()
    # check the input packages searched result
    for pkg_info in resp:
        if not pkg_info:
            LOGGER.warning("There is a None type in resp")
            continue
        src_name = pkg_info.get("source_name")
        if not src_name:
            continue
        # check the input packages searched result
        if self.__level == 1:
            searched_pkg.discard(src_name)
        if not self._has_searched_dep(src_name, "build"):
            depend_set = set()
            # for non build depend, the list would be empty
            build_list = []
            for req in pkg_info.get("requires"):
                com_bin_name = req.get("com_bin_name")
                com_src_name = req.get("com_src_name")
                com_db = req.get("com_database")
                # for self build, need to update the search dict for next search loop
                if self.depend_history:
                    self.depend_history.add_search_dict(
                        "build",
                        com_db,
                        com_bin_name=com_bin_name,
                        com_src_name=com_src_name)
                # insert req info in last level loop
                if not self_build and self.__level == level:
                    self._insert_com_info(req)
                # add the bin name into depend set
                if self._checka_and_add_com_value(req,
                                                  self.search_build_dict,
                                                  self_build=self_build):
                    depend_set.add(com_bin_name)
            build_list = list(depend_set)
            self._insert_into_source_dict(
                name=src_name,
                version=pkg_info.get("src_version", "NOT FOUND"),
                database=pkg_info.get("database", "NOT FOUND"),
                build=build_list)
    self._search_set.clear()
    # on the first level only, report input packages no database resolved;
    # skipped during a self-build history walk
    if self.__level == 1 and searched_pkg and not self.depend_history:
        self.log_msg = f"Can not find the packages: {str(searched_pkg)} in all databases"
        LOGGER.warning(self.log_msg)
def __query_one_level_dep(self, level):
    """
    Description: query the one level install dep in database

    Args:
        level: The number of levels of dependency querying, the
            default value of level is 0, which means search all dependency
    Returns:
        resp: the response for one level depend result
    """
    resp = self._query_in_db(search_dict=self.search_install_dict,
                             func=self.__query_installreq.get_install_req)
    if self.__level == 1:
        # snapshot of the input packages; names get discarded as they are
        # found, so what remains afterwards is the "not found" set
        searched_pkg = copy.deepcopy(self._search_set)
    for pkg_info in resp:
        if not pkg_info:
            LOGGER.warning("There is a None type in resp")
            continue
        bin_name = pkg_info.get("binary_name")
        src_name = pkg_info.get("src_name")
        if not bin_name:
            continue
        # check the input packages searched result
        if self.__level == 1:
            searched_pkg.discard(bin_name)
        if not self._has_searched_dep(bin_name, "install"):
            # binary pkg which has not query the installdep yet,
            # put it into search dict based on the database which found it
            depend_set = set()
            # for non install depend, the list would be empty
            install_list = []
            for req in pkg_info.get("requires"):
                com_bin_name = req.get("com_bin_name")
                # insert req info in last level loop
                if self.__level == level:
                    self._insert_com_info(req)
                if self._checka_and_add_com_value(
                        req, self.search_install_dict):
                    depend_set.add(com_bin_name)
            install_list = list(depend_set)
            # put the package binary info into binary result dict
            self._insert_into_binary_dict(
                name=bin_name,
                version=pkg_info.get("bin_version", "NOT FOUND"),
                source_name=pkg_info.get("src_name", "NOT FOUND"),
                database=pkg_info.get("database", "NOT FOUND"),
                install=install_list)
            # put the package source info into source result dict
            if src_name and src_name not in self.source_dict:
                self._insert_into_source_dict(
                    name=src_name,
                    version=pkg_info.get("src_version", "NOT FOUND"),
                    database=pkg_info.get("database", "NOT FOUND"))
            if self.depend_history and self.depend_type == "self":
                self.depend_history.add_search_dict(
                    "install",
                    pkg_info.get("database"),
                    com_src_name=src_name)
    self._search_set.clear()
    # on the first level only, report input packages no database resolved;
    # skipped during a self-depend history walk
    if self.__level == 1 and searched_pkg and not self.depend_history:
        # fixed: message previously read "packages:{...}in" with no spaces,
        # inconsistent with the build-depend variant of this method
        self.log_msg = f"Can not find the packages: {str(searched_pkg)} in all databases"
        LOGGER.warning(self.log_msg)