def import_packages(server_to_import, xtract_dir, logger):
    """ Import CentOS/RHEL packages """
    import subprocess
    # Import packages contained in "pkg_list.txt"
    pkg_file = xtract_dir + "/pkg_list.txt"
    if os.path.isfile(pkg_file):
        f = open(pkg_file, 'rb')
        rawtext = f.read()
        f.close()
        lines = rawtext.split('\n')
        # Temporary package list being built
        server_to_import.packages = sf.PackageList()
        pkg_expression = r"(?P<pkg_name>^[A-Za-z0-9:\-~.+_]+)(?P<var2>\s+)"
        pkg_reg = re.compile(pkg_expression)
        for l in lines:
            result_re = pkg_reg.match(l)
            if result_re is not None:
                pkg_name = result_re.group('pkg_name')
                pkg_version = None
                if pkg_name.startswith("kernel-"):
                    server_to_import.kernel_version_installed = pkg_name.replace("kernel-", "")
                server_to_import.packages.add_package(sf.Package(pkg_name, pkg_version))
        # Check the import against the wc Linux command
        grep_wc = subprocess.check_output(('wc', '-l', str(pkg_file)))
        grep_wc = int(grep_wc.split(' ')[0])
        if grep_wc != server_to_import.packages.get_number():
            logger.error("CentOS packages importation error. Count mismatch with the wc Linux command")
            logger.error("Imported : " + str(server_to_import.packages.get_number()) +
                         " / Number of lines in the file : " + str(grep_wc))
        else:
            logger.info(str(server_to_import.packages.get_number()) + " packages imported.")
    else:
        logger.warning("Failed importation of /pkg_list.txt : unable to find a valid file")
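
# Illustrative sketch (not part of the importer): how pkg_reg above behaves on a sample
# line. The exact pkg_list.txt layout is an assumption here; the expression only requires
# a package NVR followed by at least one whitespace character.
def _example_centos_pkg_line_parsing():
    import re
    pkg_reg = re.compile(r"(?P<pkg_name>^[A-Za-z0-9:\-~.+_]+)(?P<var2>\s+)")
    sample = "kernel-3.10.0-1160.el7.x86_64 "  # hypothetical line
    match = pkg_reg.match(sample)
    # The whole NVR (including the arch) ends up in pkg_name; a "kernel-" prefix is
    # what makes import_packages record kernel_version_installed.
    assert match.group('pkg_name') == "kernel-3.10.0-1160.el7.x86_64"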
def analyze(self):
    """ Core function which is called in the run.
        Does the effective work.
    """
    start_time = time.time()
    path = self.config.ressources_path
    if path is None:
        path = "/tmp/"
    # Create the directory containing the data if it does not exist
    if not os.path.exists(path):
        os.makedirs(path)
    # We know we are on a Debian distribution.
    # Debian packages are available at this site:
    # https://packages.debian.org/fr/X/allpackages?format=txt.gz
    debian_version = self.server.osname
    # Check if the release packages are already stored
    # Check if the release packages are the latest version
    # Download the latest release packages of the right distribution (lenny, wheezy, ...)
    # Check if other packages lists are available (updates, backports, ...)
    analyze_distrib_list = []
    # Get the stable Debian version list
    debian_list = self.config_server.debian["stable_versions"]
    # Filter the specific distribution (example: wheezy)
    for distrib in debian_list:
        if debian_version in distrib:
            analyze_distrib_list.append(distrib)
    # Reorganize the list in order to start with backports, then updates and finally base.
    # Sort the list: [distrib, distrib_backports, distrib_updates]
    analyze_distrib_list.sort()
    # Rotate the list by -1 to put the first element at the end:
    # [distrib_backports, distrib_updates, distrib]
    analyze_distrib_list = analyze_distrib_list[1:] + analyze_distrib_list[:1]
    distribution = analyze_distrib_list[-1]
    self._logger.info("Debian " + str(distribution) +
                      " kernel analysis in progress...")
    # Download the packages list if necessary
    url = self.config_server.debian["packages_url"].replace(
        "DISTRIBUTION", str(distribution))
    file_name = distribution + "_packages.gz"
    # Take the lock to prevent conflicting access with the package analysis function
    self._lock.acquire()
    download_boolean = tools.downloadFile(url, file_name, self.config, path)
    self._lock.release()
    if not download_boolean:
        self._logger.error("Download file error")
        return False
    # Read the file containing the packages list
    self._lock.acquire()
    file_path = path + file_name
    f_1 = gzip.open(file_path, 'rb')
    rawtext = f_1.read()
    f_1.close()
    self._lock.release()
    # Split the text into lines
    releases = rawtext.split('\n')
    # Regular expression to extract the release package name and version
    pkg_release_expression = r"(?P<var1>^[A-Za-z0-9:\-~.+]+)(?P<var2> \()(?P<var3>[A-Za-z0-9:\-~.+]+)(?P<var4>[ \)]{1})"
    pkg_release_reg = re.compile(pkg_release_expression)
    # Package object which will hold the release kernel package (None if not found)
    kernel_pkg_dwnld = None
    for l_1 in releases:
        result_re = pkg_release_reg.match(l_1)
        if result_re is not None:
            pkg_name = result_re.group('var1')
            pkg_version = result_re.group('var3')
            if pkg_name == "linux-image-" + str(self.server.kernel_release) \
                    and "dummy package" not in l_1:
                if kernel_pkg_dwnld is not None:
                    self._logger.error(
                        "Several packages corresponding to the release kernel version")
                    return False
                else:
                    kernel_pkg_dwnld = sf.Package(pkg_name, pkg_version)
                    self.server.kernel_version_uptodate = pkg_version
    # Kernel check algorithm
    # Step 1 : check RELEASE from "uname.txt" against the downloaded package list
    #          OK -> next step                        KO -> critical obsolescence
    # Step 2 : check VERSION from "uname.txt" against the downloaded package list
    #          OK -> up to date                       KO -> next step
    # Step 3 : check VERSION from the host list (dpkg.txt) against the downloaded package list
    #          OK -> up to date but needs a reboot    KO -> obsolete version (but correct release)
    # --------------------------------------------
    # ------------------ Step 1 ------------------
    # --------------------------------------------
    # If kernel_pkg_dwnld is None, linux-image-"kernelRelease" was not found: obsolete
    if kernel_pkg_dwnld is None:
        self.server.kernel_result = 1
    # --------------------------------------------
    # ------------------ Step 2 ------------------
    # --------------------------------------------
    else:
        if self.server.kernel_version_running == kernel_pkg_dwnld.version:
            # Kernel up to date
            self.server.kernel_result = 4
        # --------------------------------------------
        # ------------------ Step 3 ------------------
        # --------------------------------------------
        else:
            if self.server.kernel_version_installed == kernel_pkg_dwnld.version:
                self.server.kernel_result = 3
            else:
                self.server.kernel_result = 2
    end_time = time.time()
    self._logger.info("Elapsed time: " + str((end_time - start_time) * 1000) + " msecs")
    return True
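
# Hedged reading aid (hypothetical name, not defined elsewhere in the project): the
# meaning of the kernel_result codes assigned above, reconstructed from the step
# comments of the kernel check algorithm in analyze().
KERNEL_RESULT_LABELS = {
    1: "release kernel package not found in the distribution list (critical obsolescence)",
    2: "obsolete kernel version (release still published)",
    3: "up-to-date kernel installed but not running (reboot needed)",
    4: "kernel up to date",
}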
def analyze(self):
    """ Core function which is called in the run.
        Does the effective work.
    """
    start_time = time.time()
    path = self.config.ressources_path
    if path is None:
        path = "/tmp/"
    # Create the directory containing the data if it does not exist
    if not os.path.exists(path):
        os.makedirs(path)
    # Package check algorithm
    # Step 1 : check packages against the RHEL packages
    #          OK -> up to date   KO -> undetermined (up to date, obsolete or unchecked)
    # Step 2 : check the name against the RHEL packages
    #          OK -> obsolete     KO -> unchecked
    # List of package objects to analyze
    packages_list_to_analyze = self.packages.dict.values()
    # Temporary lists to manage packages
    packages_tmp_uptodate = []
    packages_tmp_obsolete = []
    packages_tmp_unchecked = []
    pkg_obsolete_expression = r"(?P<pkg_name_start>[A-Za-z0-9:+~\-\._]+)(?P<pkg_version>(\-))"
    pkg_obsolete_reg = re.compile(pkg_obsolete_expression)
    # Download the packages list
    updates_list = self.request_rhel_packages()
    self._logger.info("Nb RHEL packages imported : " + str(len(updates_list)))
    # --------------------------------------------
    # ------------------ Step 1 ------------------
    # --------------------------------------------
    # tmp_list which will become packages_list_to_analyze after the loop
    tmp_list = []
    for pkg in packages_list_to_analyze:
        # Add the ".rpm" suffix
        # ex : package.x86_64.rpm
        pkg_with_arch = pkg.name + ".rpm"
        if pkg_with_arch in updates_list or \
                pkg_with_arch.replace(self.server.osarchitecture + ".rpm", "noarch.rpm") in updates_list:
            packages_tmp_uptodate.append(sf.Package(pkg.name, pkg.version))
        else:
            tmp_list.append(pkg)
    packages_list_to_analyze = tmp_list
    # --------------------------------------------
    # ------------------ Step 2 ------------------
    # --------------------------------------------
    # Dictionary which maps the start of a package name to the full update line
    updates_dict_start = {}
    for pkg_update in updates_list:
        result_re = pkg_obsolete_reg.match(pkg_update)
        if result_re is not None:
            updates_dict_start[result_re.group('pkg_name_start')] = pkg_update
        else:
            self._logger.error("Regular expression parsing error : step 2 - a")
    tmp_list = []
    for pkg in packages_list_to_analyze:
        pkg_found = False
        result_re = pkg_obsolete_reg.match(pkg.name)
        if result_re is not None:
            pkg_name_start = result_re.group('pkg_name_start')
            for pkg_update_start in updates_dict_start.keys():
                if pkg_name_start == pkg_update_start:
                    pkg_found = True
                    packages_tmp_obsolete.append(
                        sf.Package(pkg.name, pkg.version, updates_dict_start[pkg_update_start]))
        else:
            self._logger.error("Regular expression parsing error : step 2 - b")
        if not pkg_found:
            tmp_list.append(pkg)
    packages_tmp_unchecked = tmp_list
    # Store the results and sort the lists in alphabetical name order
    self.server.packages_uptodate.push_package_list(packages_tmp_uptodate)
    self.server.packages_obsolete.push_package_list(packages_tmp_obsolete)
    self.server.packages_unchecked.push_package_list(packages_tmp_unchecked)
    # Fill the reporting
    self.server.nb_packages = self.packages.get_number()
    self.server.nb_packages_uptodate = self.server.packages_uptodate.get_number()
    self.server.nb_packages_obsolete = self.server.packages_obsolete.get_number()
    self.server.nb_packages_unchecked = self.server.packages_unchecked.get_number()
    self._logger.debug("Nb Packages : " + str(self.server.nb_packages))
    self._logger.debug("Nb up to date : " + str(self.server.nb_packages_uptodate))
    self._logger.debug("Nb Packages obsolete : " + str(self.server.nb_packages_obsolete))
    self._logger.debug("Nb Packages unchecked : " + str(self.server.nb_packages_unchecked))
    end_time = time.time()
    self._logger.info("Elapsed time: " + str((end_time - start_time) * 1000) + " msecs")
    self._logger.info("RHEL packages successfully analyzed!")
    return True
def import_packages(server_to_import, xtract_dir, logger):
    """ Import Debian packages """
    import subprocess
    # Import packages contained in "pkg_list.txt"
    # First letter -> desired package state ("selection state"):
    #
    #   u ... unknown
    #   i ... install
    #   r ... remove/deinstall
    #   p ... purge (remove including config files)
    #   h ... hold
    #
    # Second letter -> current package state:
    #
    #   n ... not-installed
    #   i ... installed
    #   c ... config-files (only the config files are installed)
    #   u ... unpacked
    #   f ... half-configured (configuration failed for some reason)
    #   h ... half-installed (installation failed for some reason)
    #   w ... triggers-awaited (package is waiting for a trigger from another package)
    #   t ... triggers-pending (package has been triggered)
    pkg_file = os.path.join(xtract_dir, "pkg_list.txt")
    if os.path.isfile(pkg_file):
        f = open(pkg_file, 'rb')
        rawtext = f.read()
        f.close()
        lines = rawtext.split('\n')
        # Temporary package list being built
        server_to_import.packages = sf.PackageList()
        # "ii" packages are properly installed
        pkg_ii_expression = r"(?P<var1>^ii )(?P<var2>[A-Za-z0-9:\-~.+]+)(?P<var3>[ ]+)(?P<var4>[A-Za-z0-9:\-~.+]+)"
        pkg_ii_reg = re.compile(pkg_ii_expression)
        for l in lines:
            result_re = pkg_ii_reg.match(l)
            if result_re is not None:
                pkg_name = result_re.group('var2')
                pkg_version = result_re.group('var4')
                if pkg_name == "linux-image-" + str(server_to_import.kernel_release):
                    server_to_import.kernel_version_installed = pkg_version
                server_to_import.packages.add_package(
                    sf.Package(pkg_name, pkg_version))
        # Check the import against the grep and wc Linux commands
        grep_cmd = subprocess.Popen(["grep", "^ii", str(pkg_file)],
                                    stdout=subprocess.PIPE)
        grep_wc = subprocess.check_output(('wc', '-l'), stdin=grep_cmd.stdout)
        grep_wc = int(grep_wc)
        if grep_wc != server_to_import.packages.get_number():
            logger.error("Debian packages importation error. Count mismatch with the grep Linux command")
            logger.error("Imported : " + str(server_to_import.packages.get_number()) +
                         " grep ii : " + str(grep_wc))
        else:
            logger.info(str(server_to_import.packages.get_number()) + " packages imported.")
    else:
        logger.warning("Failed importation of /pkg_list.txt : unable to find a valid file")
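
# Illustrative sketch only: how pkg_ii_reg above decomposes an "ii" line. The exact
# spacing of pkg_list.txt is an assumption here; note that the expression expects a
# single space right after "ii", then any run of spaces between name and version.
def _example_debian_ii_line_parsing():
    import re
    pkg_ii_reg = re.compile(r"(?P<var1>^ii )(?P<var2>[A-Za-z0-9:\-~.+]+)(?P<var3>[ ]+)(?P<var4>[A-Za-z0-9:\-~.+]+)")
    sample = "ii linux-image-3.16.0-4-amd64   3.16.43-2+deb8u5"  # hypothetical line
    match = pkg_ii_reg.match(sample)
    assert match.group('var2') == "linux-image-3.16.0-4-amd64"  # package name
    assert match.group('var4') == "3.16.43-2+deb8u5"             # installed version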
def analyze(self):
    """ Core function which is called in the run.
        Does the effective work.
    """
    start_time = time.time()
    path = self.config.ressources_path
    if path is None:
        path = "/tmp/"
    # Create the directory containing the data if it does not exist
    if not os.path.exists(path):
        os.makedirs(path)
    # We know we are on a Debian distribution.
    # Debian packages are available at this site:
    # https://packages.debian.org/fr/X/allpackages?format=txt.gz
    debian_version = self.server.osname
    self._logger.info("Debian version name : " + str(self.server.osname))
    # Check if the release packages are already stored
    # Check if the release packages are the latest version
    # Download the latest release packages of the right distribution (lenny, wheezy, ...)
    # Check if other packages lists are available (updates, backports, ...)
    analyze_distrib_list = []
    # Get the stable Debian version list
    debian_list = self.config_server.debian["stable_versions"]
    # Filter the specific distribution (example: wheezy)
    for distrib in debian_list:
        if debian_version in distrib:
            analyze_distrib_list.append(distrib)
    # Reorganize the list in order to start with backports, then updates and finally base.
    # Sort the list: [distrib, distrib_backports, distrib_updates]
    analyze_distrib_list.sort()
    # Rotate the list by -1 to put the first element at the end:
    # [distrib_backports, distrib_updates, distrib]
    analyze_distrib_list = analyze_distrib_list[1:] + analyze_distrib_list[:1]
    # Inform the user of the packages lists that will be downloaded and checked
    self._logger.info("Debian distribution packages that will be checked")
    for d_1 in analyze_distrib_list:
        self._logger.info(d_1)
    # List of package objects to analyze
    packages_list_to_analyze = self.packages.dict.values()
    # Temporary lists to manage packages
    packages_tmp_uptodate = []
    packages_tmp_obsolete = []
    packages_tmp_unchecked = []
    # Package check algorithm
    # Step 1 : check packages against the backports release packages
    #          OK -> up to date                      KO -> undetermined (up to date, obsolete or unchecked)
    # Step 2 : check packages against the updates release packages
    #          OK -> up to date                      KO -> undetermined (up to date, obsolete or unchecked)
    # Step 3 : check the name against the updates release packages
    #          OK -> obsolete (backports or updates) KO -> undetermined (up to date, obsolete or unchecked)
    # Step 4 : check packages against the base release packages
    #          OK -> up to date                      KO -> undetermined (obsolete or unchecked)
    # Step 5 : check the name against the base release packages
    #          OK -> obsolete (base)                 KO -> unchecked
    self._logger.info("Debian " + str(self.server.osname) +
                      " packages analysis in progress...")
    # Regular expression to extract the release package name and version
    pkg_release_expression = r"(?P<pkg_name>^[A-Za-z0-9:\-~.+]+)(?P<var2> \()(?P<pkg_version>[A-Za-z0-9:\-~,.+\[\] ]+)(?P<var4>[\)]{1})"
    pkg_release_reg = re.compile(pkg_release_expression)
    # --------------------------------------------
    # ------------------ Step 1 ------------------
    # --------------------------------------------
    distribution = analyze_distrib_list[0]
    # Download the packages list if necessary
    url = self.config_server.debian["packages_url"].replace(
        "DISTRIBUTION", str(distribution))
    file_name = distribution + "_packages.gz"
    self._lock.acquire()
    download_boolean = tools.downloadFile(url, file_name, self.config, path)
    self._lock.release()
    if not download_boolean:
        self._logger.error("Download file error")
        return False
    # Read the file containing the packages list
    file_path = path + file_name
    f_1 = gzip.open(file_path, 'rb')
    rawtext = f_1.read()
    f_1.close()
    # Split the text into lines
    releases = rawtext.split('\n')
    # Dictionary of release packages
    release_dict = {}
    for l_1 in releases:
        result_re = pkg_release_reg.match(l_1)
        if result_re is not None:
            pkg_name = result_re.group('pkg_name')
            pkg_version = result_re.group('pkg_version')
            release_dict[pkg_name] = pkg_version
    # Check each package against the release packages.
    # A PackageList object contains a dictionary and we access the Package
    # objects with dict.values()
    for pkg in packages_list_to_analyze:
        # Delete the ":amd64" suffix for the comparison
        if pkg.name[-6:] == ":amd64":
            pkg.name = pkg.name[:-6]
        if pkg.name in release_dict.keys():
            if pkg.version == release_dict[pkg.name]:
                packages_tmp_uptodate.append(
                    sf.Package(pkg.name, pkg.version, None, distribution))
            else:
                packages_tmp_unchecked.append(pkg)
        else:
            packages_tmp_unchecked.append(pkg)
    # -------------------------------------------------------
    # ------------ Step 2 (and 4 with the loop) -------------
    # -------------------------------------------------------
    for distribution in analyze_distrib_list[1:]:
        packages_list_to_analyze = packages_tmp_unchecked
        packages_tmp_unchecked = []
        # Download the packages list if necessary
        url = self.config_server.debian["packages_url"].replace(
            "DISTRIBUTION", str(distribution))
        file_name = distribution + "_packages.gz"
        self._lock.acquire()
        download_boolean = tools.downloadFile(url, file_name, self.config, path)
        self._lock.release()
        if not download_boolean:
            self._logger.error("Download file error")
            return False
        # Read the file containing the packages list
        file_path = path + file_name
        f_1 = gzip.open(file_path, 'rb')
        rawtext = f_1.read()
        f_1.close()
        # Split the text into lines
        releases = rawtext.split('\n')
        # Dictionary of release packages
        release_dict = {}
        for l_1 in releases:
            result_re = pkg_release_reg.match(l_1)
            if result_re is not None:
                arch = self.server.kernel_release.split("-")[-1]
                pkg_name = result_re.group('pkg_name')
                pkg_version = result_re.group('pkg_version')
                # Keep the version entry that matches the host architecture
                elts = pkg_version.split("], ")
                for elt in elts:
                    if ("[" + arch + "," in elt) or (" " + arch + "," in elt) or (" " + arch + "]" in elt):
                        pkg_version = elt.split(" ")[0]
                    if "[" not in elt:
                        pkg_version = elt
                release_dict[pkg_name] = pkg_version
        # Check each package against the release packages.
        # A PackageList object contains a dictionary and we access the Package
        # objects with dict.values()
        for pkg in packages_list_to_analyze:
            # Delete the ":amd64" suffix for the comparison
            if pkg.name[-6:] == ":amd64":
                pkg.name = pkg.name[:-6]
            if pkg.name in release_dict.keys():
                if pkg.version == release_dict[pkg.name]:
                    packages_tmp_uptodate.append(
                        sf.Package(pkg.name, pkg.version, None, distribution))
                else:
                    packages_tmp_obsolete.append(
                        sf.Package(pkg.name, pkg.version,
                                   release_dict[pkg.name], distribution))
            else:
                packages_tmp_unchecked.append(pkg)
        self._logger.debug("Nb Packages : " + str(self.packages.get_number()))
        self._logger.debug("Nb up to date : " + str(len(packages_tmp_uptodate)))
        self._logger.debug("Nb Packages obsolete : " + str(len(packages_tmp_obsolete)))
        self._logger.debug("Nb Packages unchecked : " + str(len(packages_tmp_unchecked)))
        # ------------------------------------------------------------------------
        self._logger.info("Debian " + str(distribution) + " packages successfully analyzed")
    # Store the results and sort the lists in alphabetical name order
    self.server.packages_uptodate.push_package_list(packages_tmp_uptodate)
    self.server.packages_obsolete.push_package_list(packages_tmp_obsolete)
    self.server.packages_unchecked.push_package_list(packages_tmp_unchecked)
    # Fill the reporting
    self.server.nb_packages = self.packages.get_number()
    self.server.nb_packages_uptodate = self.server.packages_uptodate.get_number()
    self.server.nb_packages_obsolete = self.server.packages_obsolete.get_number()
    self.server.nb_packages_unchecked = self.server.packages_unchecked.get_number()
    self._logger.debug("Nb Packages : " + str(self.server.nb_packages))
    self._logger.debug("Nb up to date : " + str(self.server.nb_packages_uptodate))
    self._logger.debug("Nb Packages obsolete : " + str(self.server.nb_packages_obsolete))
    self._logger.debug("Nb Packages unchecked : " + str(self.server.nb_packages_unchecked))
    end_time = time.time()
    self._logger.info("Elapsed time: " + str((end_time - start_time) * 1000) + " msecs")
    self._logger.info("Debian packages successfully analyzed!")
    return True
def analyze(self):
    """ Core function which is called in the run.
        Does the effective work.
    """
    start_time = time.time()
    path = self.config.ressources_path
    if path is None:
        path = "/tmp/"
    # Create the directory containing the data if it does not exist
    if not os.path.exists(path):
        os.makedirs(path)
    # Package check algorithm
    # Step 1 : check packages against the updates packages
    #          OK -> up to date   KO -> undetermined (up to date, obsolete or unchecked)
    # Step 2 : check the name against the updates packages
    #          OK -> obsolete     KO -> undetermined (up to date, obsolete or unchecked)
    # Step 3 : check packages against the CentOS release packages
    #          OK -> up to date   KO -> undetermined (obsolete or unchecked)
    # Step 4 : check the name against the CentOS release packages
    #          OK -> obsolete     KO -> unchecked
    # List of package objects to analyze
    packages_list_to_analyze = self.packages.dict.values()
    # Temporary lists to manage packages
    packages_tmp_uptodate = []
    packages_tmp_obsolete = []
    packages_tmp_unchecked = []
    # Download the packages list if necessary
    # "http://mirrors.atosworldline.com/public/centos/filelist.gz"
    url = self.config_server.centos["packages_url_stable_version"]
    self._logger.debug("url : " + url)
    file_name = "centos_packages.gz"
    pkg_string_header = "./" + self.server.osversion + "/os/" + self.server.osarchitecture + "/CentOS/"
    pkg_string_header_update = "./" + self.server.osversion + "/updates/" + self.server.osarchitecture + "/RPMS/"
    pkg_release_expression = pkg_string_header + r"(?P<pkg_release>[A-Za-z0-9:\-~.+_]+)"
    pkg_update_expression = pkg_string_header_update + r"(?P<pkg_update>[A-Za-z0-9:\-~.+_]+)"
    pkg_obsolete_expression = r"(?P<pkg_name_start>[A-Za-z0-9:+~\-\._]+)"
    pkg_release_reg = re.compile(pkg_release_expression)
    pkg_update_reg = re.compile(pkg_update_expression)
    pkg_obsolete_reg = re.compile(pkg_obsolete_expression)
    # List of release packages
    release_list = []
    # List of updates packages
    updates_list = []
    if self.server.osversion in self.config_server.centos["stable_versions"]:
        self._lock.acquire()
        download_boolean = tools.downloadFile(url, file_name, self.config, path)
        self._lock.release()
        if not download_boolean:
            self._logger.error("Download file error")
            return False
        # Read the downloaded file containing the packages list
        file_path = path + file_name
        f_1 = gzip.open(file_path, 'rb')
        rawtext = f_1.read()
        f_1.close()
        # Split the text into lines
        releases = rawtext.split('\n')
        # Read the package lines and fill the release and updates packages lists
        for l_1 in releases:
            if pkg_release_reg.match(l_1) is not None:
                # Fill the release list
                result_re = pkg_release_reg.match(l_1)
                pkg_name = result_re.group('pkg_release')
                release_list.append(pkg_name)
            elif pkg_update_reg.match(l_1) is not None:
                # Fill the updates list
                result_re = pkg_update_reg.match(l_1)
                pkg_name = result_re.group('pkg_update')
                updates_list.append(pkg_name)
    else:
        # Versions no longer on the mirror: scrape the SRPMS indexes on vault.centos.org
        url_os = "http://vault.centos.org/" + str(self.server.osversion) + "/os/SRPMS/"
        url_updates = "http://vault.centos.org/" + str(self.server.osversion) + "/updates/SRPMS/"
        pattern = r"(?P<var1>.*)(?P<var2><a href=\")(?P<pkg>.+)(?P<var3>\.src.rpm\">)"
        reg = re.compile(pattern)
        lines_os = urllib2.urlopen(url_os).read().split('\n')
        lines_updates = urllib2.urlopen(url_updates).read().split('\n')
        for line in lines_os:
            if reg.match(line):
                result_re = reg.match(line)
                release_list.append(result_re.group('pkg'))
        for line in lines_updates:
            if reg.match(line):
                result_re = reg.match(line)
                updates_list.append(result_re.group('pkg'))
    # --------------------------------------------
    # ------------------ Step 1 ------------------
    # --------------------------------------------
    # tmp_list which will become packages_list_to_analyze after the loop
    tmp_list = []
    for pkg in packages_list_to_analyze:
        # Add the ".osarchitecture.rpm" suffix
        # ex : package.x86_64.rpm
        pkg_with_arch = pkg.name + "." + self.server.osarchitecture + ".rpm"
        if pkg_with_arch in updates_list or \
                pkg_with_arch.replace(self.server.osarchitecture + ".rpm", "noarch.rpm") in updates_list:
            packages_tmp_uptodate.append(sf.Package(pkg.name, pkg.version))
        else:
            tmp_list.append(pkg)
    packages_list_to_analyze = tmp_list
    # --------------------------------------------
    # ------------------ Step 2 ------------------
    # --------------------------------------------
    # Dictionary which maps the start of a package name to the full update line
    updates_dict_start = {}
    for pkg_update in updates_list:
        result_re = pkg_obsolete_reg.match(pkg_update)
        if result_re is not None:
            updates_dict_start[result_re.group('pkg_name_start')] = pkg_update
        else:
            self._logger.error("Regular expression parsing error : step 2 - a")
    tmp_list = []
    for pkg in packages_list_to_analyze:
        pkg_found = False
        result_re = pkg_obsolete_reg.match(pkg.name)
        if result_re is not None:
            pkg_name_start = result_re.group('pkg_name_start')
            for pkg_update_start in updates_dict_start.keys():
                if pkg_name_start == pkg_update_start:
                    pkg_found = True
                    packages_tmp_obsolete.append(
                        sf.Package(pkg.name, pkg.version, updates_dict_start[pkg_update_start]))
        else:
            self._logger.error("Regular expression parsing error : step 2 - b")
        if not pkg_found:
            tmp_list.append(pkg)
    packages_list_to_analyze = tmp_list
    # --------------------------------------------
    # ------------------ Step 3 ------------------
    # --------------------------------------------
    tmp_list = []
    for pkg in packages_list_to_analyze:
        # Add the ".osarchitecture.rpm" suffix
        # ex : package.x86_64.rpm
        pkg_with_arch = pkg.name + "." + self.server.osarchitecture + ".rpm"
        if pkg_with_arch in release_list or \
                pkg_with_arch.replace(self.server.osarchitecture + ".rpm", "noarch.rpm") in release_list:
            packages_tmp_uptodate.append(sf.Package(pkg.name, pkg.version))
        else:
            tmp_list.append(pkg)
    packages_list_to_analyze = tmp_list
    # --------------------------------------------
    # ------------------ Step 4 ------------------
    # --------------------------------------------
    # Dictionary which maps the start of a package name to the full release line
    release_dict_start = {}
    self._logger.debug("Nb release packages : " + str(len(release_list)))
    for pkg_release in release_list:
        result_re = pkg_obsolete_reg.match(pkg_release)
        if result_re is not None:
            release_dict_start[result_re.group('pkg_name_start')] = pkg_release
        else:
            self._logger.error("Regular expression parsing error : step 4 - a")
    for pkg in packages_list_to_analyze:
        pkg_found = False
        result_re = pkg_obsolete_reg.match(pkg.name)
        if result_re is not None:
            pkg_name_start = result_re.group('pkg_name_start')
            if pkg.name == pkg_name_start:
                pkg_found = True
                packages_tmp_uptodate.append(sf.Package(pkg.name, pkg.version, ""))
                self._logger.debug("Installed package : " + pkg.name +
                                   " // Expected package : " + pkg_name_start)
            else:
                self._logger.debug("Installed package : " + pkg.name)
                self._logger.debug("Expected package : " + pkg_name_start)
            # for pkg_release_start in release_dict_start.keys():
            #     if pkg_name_start == pkg_release_start:
            #         pkg_found = True
            #         packages_tmp_obsolete.append(
            #             sf.Package(pkg.name, pkg.version, release_dict_start[pkg_release_start]))
        else:
            self._logger.error("Regular expression parsing error : step 4 - b")
        if not pkg_found:
            packages_tmp_unchecked.append(sf.Package(pkg.name, pkg.version))
    # Store the results and sort the lists in alphabetical name order
    self.server.packages_uptodate.push_package_list(packages_tmp_uptodate)
    self.server.packages_obsolete.push_package_list(packages_tmp_obsolete)
    self.server.packages_unchecked.push_package_list(packages_tmp_unchecked)
    # Fill the reporting
    self.server.nb_packages = self.packages.get_number()
    self.server.nb_packages_uptodate = self.server.packages_uptodate.get_number()
    self.server.nb_packages_obsolete = self.server.packages_obsolete.get_number()
    self.server.nb_packages_unchecked = self.server.packages_unchecked.get_number()
    self._logger.debug("Nb Packages : " + str(self.server.nb_packages))
    self._logger.debug("Nb up to date : " + str(self.server.nb_packages_uptodate))
    self._logger.debug("Nb Packages obsolete : " + str(self.server.nb_packages_obsolete))
    self._logger.debug("Nb Packages unchecked : " + str(self.server.nb_packages_unchecked))
    end_time = time.time()
    self._logger.info("Elapsed time: " + str((end_time - start_time) * 1000) + " msecs")
    self._logger.info("CentOS packages successfully analyzed!")
    return True
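
# Minimal sketch (illustration only) of how pkg_update_reg above splits a filelist.gz
# entry. The path and version below are hypothetical; the real prefixes are built from
# self.server.osversion and self.server.osarchitecture.
def _example_centos_filelist_parsing():
    import re
    osversion, osarchitecture = "6.5", "x86_64"
    pkg_string_header_update = "./" + osversion + "/updates/" + osarchitecture + "/RPMS/"
    pkg_update_reg = re.compile(pkg_string_header_update + r"(?P<pkg_update>[A-Za-z0-9:\-~.+_]+)")
    sample = "./6.5/updates/x86_64/RPMS/bash-4.1.2-15.el6_4.x86_64.rpm"
    match = pkg_update_reg.match(sample)
    # The captured file name is what Step 1 compares against "<name>.<arch>.rpm".
    assert match.group('pkg_update') == "bash-4.1.2-15.el6_4.x86_64.rpm"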