def process_deps(self, deps):
    """Find dependencies missing from the destination channels and clone them.

    *deps* is an iterable of dicts mapping a requirement to its list of
    candidate packages (oldest first).  Anything present in a source channel
    but absent from the matching destination is handed back to that channel's
    cloner.
    """
    # One pending-package list per destination channel label.
    pending = dict((label, []) for label in self.channel_map.values())
    unresolved = []
    print('Processing Dependencies:')
    progress = ProgressBar(prompt="", endTag=' - complete',
                           finalSize=len(deps), finalBarLength=40,
                           stream=sys.stdout)
    progress.printAll(1)
    # Walk every dependency and collect those that don't exist in the
    # destination channels.
    for dep_pkg in deps:
        progress.addTo(1)
        progress.printIncrement()
        for candidates in dep_pkg.values():
            satisfied = False
            for cloner in self.cloners:
                in_src = cloner.src_pkg_exist(candidates)
                in_dest = cloner.dest_pkg_exist(candidates)
                if in_src and not in_dest:
                    # take the oldest matching package
                    pending[cloner.dest_label()].append(candidates[0])
                elif in_src:
                    satisfied = True
            if not satisfied:
                unresolved.append(dep_pkg)
    progress.printComplete()
    for cloner in self.cloners:
        to_clone = pending[cloner.dest_label()]
        if to_clone:
            cloner.process_deps(to_clone)
def __locateDeps(self, pkgs):
    """Return {pkg: {requirement: [latest providing packages]}} for *pkgs*.

    Uses the repo store's sack to look up providers; rpmlib() pseudo
    capabilities are skipped because rpm itself satisfies them.
    """
    available = self._repostore.sack.query().available()
    dep_map = {}
    print("Solving Dependencies (%i): " % len(pkgs))
    bar = ProgressBar(prompt='', endTag=' - complete',
                      finalSize=len(pkgs), finalBarLength=40,
                      stream=sys.stdout)
    bar.printAll(1)
    for pkg in pkgs:
        bar.addTo(1)
        bar.printIncrement()
        per_pkg = {}
        dep_map[pkg] = per_pkg
        for req in pkg.requires:
            # rpm-internal capabilities need no provider lookup
            if str(req).startswith('rpmlib('):
                continue
            per_pkg[req] = list(available.filter(provides=req).latest())
    bar.printComplete()
    return dep_map
def __unsubscribeServers(labels):
    """Unsubscribe every system from the channels named in *labels*.

    Prints a per-channel count of the systems that will be affected, then
    calls the rhn_channel.unsubscribe_server DB procedure for each
    (server, channel) pair, with a progress bar on stdout.
    """
    sql = """
        select distinct sc.server_id as server_id, C.id as channel_id,
               c.parent_channel, c.label
          from rhnChannel c inner join rhnServerChannel sc
               on c.id = sc.channel_id
         where c.label in (%s)
         order by C.parent_channel
    """
    params, bind_params = _bind_many(labels)
    bind_params = ', '.join(bind_params)
    h = rhnSQL.prepare(sql % (bind_params))
    h.execute(**params)
    server_channel_list = h.fetchall_dict()

    channel_counts = {}
    for i in server_channel_list:
        channel_counts[i['label']] = channel_counts.get(i['label'], 0) + 1
    print("\nThe following channels will have their systems unsubscribed:")
    # sorted() works on both python 2 and 3; dict.keys() has no .sort()
    # on python 3, so the old keys()/sort() pair would crash there.
    for label in sorted(channel_counts.keys()):
        print("%-40s %-8s" % (label, channel_counts[label]))

    pb = ProgressBar(prompt='Unsubscribing: ', endTag=' - complete',
                     finalSize=len(server_channel_list), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    unsubscribe_server_proc = rhnSQL.Procedure("rhn_channel.unsubscribe_server")
    for i in server_channel_list:
        unsubscribe_server_proc(i['server_id'], i['channel_id'])
        pb.addTo(1)
        pb.printIncrement()
    pb.printComplete()
def __locateDeps(self, pkgs):
    """Return {pkg: {(name, flag, version): [providers]}} via yum provides data.

    Each provider returned by __whatProvides is re-checked with checkPrco
    unless the requirement looks like a file glob, because the el5 lookup
    can report false positives.
    """
    dep_map = {}
    # any glob/bracket character means the requirement is a file pattern
    is_file_pattern = re.compile(r'[/*?]|\[[^]]*/[^]]*\]').match
    print("Solving Dependencies (%i): " % len(pkgs))
    bar = ProgressBar(prompt='', endTag=' - complete',
                      finalSize=len(pkgs), finalBarLength=40,
                      stream=sys.stdout)
    bar.printAll(1)
    for pkg in pkgs:
        bar.addTo(1)
        bar.printIncrement()
        per_pkg = {}
        dep_map[pkg] = per_pkg
        reqs = pkg.requires
        reqs.sort()
        for req in reqs:
            (name, flag, version) = req
            if name.startswith('rpmlib('):
                continue
            providers = []
            for po in self.__whatProvides(name, flag, version):
                # verify this po indeed provides the dep,
                # el5 version could give some false positives
                if is_file_pattern(name) or \
                        po.checkPrco('provides', (name, flag, version)):
                    providers.append(po)
            per_pkg[req] = providers
    bar.printComplete()
    return dep_map
def clone(self):
    """Clone the collected errata into the destination channel in batches.

    Logs each advisory, clones bunch_size advisories per API call, then
    optionally re-synchronizes the cloned errata with their originals.
    """
    advisory_names = [erratum["advisory_name"]
                      for erratum in self.errata_to_clone]
    if not advisory_names:
        return
    msg = 'Cloning Errata into %s (%i):' % (self.to_label, len(advisory_names))
    print(msg)
    log_clean(0, "")
    log_clean(0, msg)
    for erratum in sorted(self.errata_to_clone,
                          key=lambda x: x['advisory_name']):
        log_clean(0, "%s - %s" % (erratum['advisory_name'],
                                  erratum['synopsis']))
    bar = ProgressBar(prompt="", endTag=' - complete',
                      finalSize=len(advisory_names), finalBarLength=40,
                      stream=sys.stdout)
    bar.printAll(1)
    while advisory_names:
        batch = advisory_names[:self.bunch_size]
        advisory_names = advisory_names[self.bunch_size:]
        self.remote_api.clone_errata(self.to_label, batch)
        bar.addTo(self.bunch_size)
        bar.printIncrement()
    self.reset_new_pkgs()
    bar.printComplete()
    if not self.no_errata_sync:
        log_clean(0, "")
        log_clean(
            0, "Synchronizing Errata in %s with originals" % self.to_label)
        self.remote_api.sync_errata(self.to_label)
def clone(self):
    """Clone the collected errata into self.to_label in bunches.

    Logs each advisory, clones bunch_size advisories per remote API call,
    then optionally re-synchronizes the cloned errata with their originals.
    """
    errata_ids = [e["advisory_name"] for e in self.errata_to_clone]
    if not errata_ids:
        return
    msg = 'Cloning Errata into %s (%i):' % (self.to_label, len(errata_ids))
    # print() with a single argument behaves identically on python 2 and
    # keeps this block consistent with the python-3-ready variant elsewhere
    # in the file.
    print(msg)
    log_clean(0, "")
    log_clean(0, msg)
    for e in self.errata_to_clone:
        log_clean(0, "%s - %s" % (e['advisory_name'], e['synopsis']))
    pb = ProgressBar(prompt="", endTag=' - complete',
                     finalSize=len(errata_ids), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    while errata_ids:
        errata_set = errata_ids[:self.bunch_size]
        del errata_ids[:self.bunch_size]
        self.remote_api.clone_errata(self.to_label, errata_set)
        pb.addTo(self.bunch_size)
        pb.printIncrement()
    self.reset_new_pkgs()
    pb.printComplete()
    if not self.no_errata_sync:
        log_clean(0, "")
        log_clean(0, "Synchronizing Errata in %s with originals"
                  % self.to_label)
        self.remote_api.sync_errata(self.to_label)
def process_deps(self, deps):
    """Collect dependencies missing from the destination channels and clone them.

    *deps* is an iterable of dicts mapping a requirement to its candidate
    package list (oldest first); destination labels come from the first
    element of each channel_map entry.
    """
    pending = dict((chan[0], []) for chan in self.channel_map.values())
    unresolved = []
    print('Processing Dependencies:')
    bar = ProgressBar(prompt="", endTag=' - complete',
                      finalSize=len(deps), finalBarLength=40,
                      stream=sys.stdout)
    bar.printAll(1)
    # Walk every dependency and gather those absent from the destinations.
    for dep_pkg in deps:
        bar.addTo(1)
        bar.printIncrement()
        for candidates in dep_pkg.values():
            satisfied = False
            for cloner in self.cloners:
                in_src = cloner.src_pkg_exist(candidates)
                in_dest = cloner.dest_pkg_exist(candidates)
                if in_src and not in_dest:
                    # take the oldest matching package
                    pending[cloner.dest_label()].append(candidates[0])
                elif in_src:
                    satisfied = True
            if not satisfied:
                unresolved.append(dep_pkg)
    bar.printComplete()
    for cloner in self.cloners:
        to_clone = pending[cloner.dest_label()]
        if to_clone:
            cloner.process_deps(to_clone)
def process_deps(self, deps):
    """Resolve *deps* against the destination channels, cloning what is missing.

    Tracks already-processed packages per destination label in self.visited
    so the recursion terminates, and recursively dep-solves any packages
    that were newly added.
    """
    def list_to_set(pkg_lists):
        # hashable form of a list of package lists (pylint complained about
        # the old lambda/map spelling; a def is clearer and equivalent)
        return set(tuple(entry) for entry in pkg_lists)

    needed_list = dict((channel[0], [])
                       for channel in self.channel_map.values())
    for cloner in self.cloners:
        if not cloner.dest_label() in self.visited:
            self.visited[cloner.dest_label()] = list_to_set(
                needed_list[cloner.dest_label()])
        self.visited[cloner.dest_label()] |= list_to_set(
            needed_list[cloner.dest_label()])

    print('Processing Dependencies:')
    pb = ProgressBar(prompt="", endTag=' - complete',
                     finalSize=len(deps), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    # loop through all the deps and find any that don't exist in the
    # destination channels
    for pkg in deps:
        pb.addTo(1)
        pb.printIncrement()
        for solved_list in pkg.values():
            for cloner in self.cloners:
                if cloner.src_pkg_exist(solved_list) \
                        and not cloner.dest_pkg_exist(solved_list):
                    # grab oldest package
                    needed_list[cloner.dest_label()].append(solved_list[0])

    added_nevras = set()
    for cloner in self.cloners:
        needed = needed_list[cloner.dest_label()]
        needed_str = list_to_set(needed)
        # drop anything this destination has already processed
        for needed_pkg in needed_str:
            if needed_pkg in self.visited[cloner.dest_label()]:
                while list(needed_pkg) in needed:
                    needed.remove(list(needed_pkg))
        self.visited[cloner.dest_label()] |= needed_str
        if len(needed) > 0:
            next_added = set(cloner.process_deps(needed))
            added_nevras = added_nevras | next_added
            cloner.total_added_nevras += len(next_added)

    pb.printComplete()

    # recursively solve dependencies to get dependencies-of-dependencies
    if len(added_nevras) > 0:
        # print() form is python-2/3 compatible, matching the rest of the file
        print('Dependencies added, looking for new dependencies')
        self.__dep_solve(list(added_nevras))
def _delete_rpms(packageIds):
    """Delete package metadata for the given package ids in batches.

    Works on a copy of *packageIds* so the caller's list is not mutated.
    """
    if not packageIds:
        return
    group = 300  # batch size per _delete_rpm_group call
    toDel = packageIds[:]
    # print() with one argument is identical on python 2 and works on python 3
    print("Deleting package metadata (" + str(len(toDel)) + "):")
    pb = ProgressBar(prompt='Removing: ', endTag=' - complete',
                     finalSize=len(packageIds), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    while toDel:
        chunk = toDel[:group]
        _delete_rpm_group(chunk)
        del toDel[:group]
        # advance by the actual chunk size — the old addTo(group) overshot
        # finalSize when the last batch held fewer than `group` ids
        pb.addTo(len(chunk))
        pb.printIncrement()
    pb.printComplete()
def process_deps(self, deps):
    """Resolve *deps* against the destination channels, cloning what is missing.

    Tracks already-processed packages per destination label in self.visited
    so the recursion terminates, and recursively dep-solves any packages
    that were newly added.
    """
    def list_to_set(pkg_lists):
        # hashable form of a list of package lists (pylint complained about
        # the old lambda/map spelling; a def is clearer and equivalent)
        return set(tuple(entry) for entry in pkg_lists)

    needed_list = dict((channel[0], [])
                       for channel in self.channel_map.values())
    for cloner in self.cloners:
        if not cloner.dest_label() in self.visited:
            self.visited[cloner.dest_label()] = list_to_set(
                needed_list[cloner.dest_label()])
        self.visited[cloner.dest_label()] |= list_to_set(
            needed_list[cloner.dest_label()])

    print('Processing Dependencies:')
    pb = ProgressBar(prompt="", endTag=' - complete',
                     finalSize=len(deps), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    # loop through all the deps and find any that don't exist in the
    # destination channels
    for pkg in deps:
        pb.addTo(1)
        pb.printIncrement()
        for solved_list in pkg.values():
            for cloner in self.cloners:
                if cloner.src_pkg_exist(solved_list) \
                        and not cloner.dest_pkg_exist(solved_list):
                    # grab oldest package
                    needed_list[cloner.dest_label()].append(solved_list[0])

    added_nevras = set()
    for cloner in self.cloners:
        needed = needed_list[cloner.dest_label()]
        needed_str = list_to_set(needed)
        # drop anything this destination has already processed
        for needed_pkg in needed_str:
            if needed_pkg in self.visited[cloner.dest_label()]:
                while list(needed_pkg) in needed:
                    needed.remove(list(needed_pkg))
        self.visited[cloner.dest_label()] |= needed_str
        if len(needed) > 0:
            next_added = set(cloner.process_deps(needed))
            added_nevras = added_nevras | next_added
            cloner.total_added_nevras += len(next_added)

    pb.printComplete()

    # recursively solve dependencies to get dependencies-of-dependencies
    if len(added_nevras) > 0:
        # print() form is python-2/3 compatible, matching the rest of the file
        print('Dependencies added, looking for new dependencies')
        self.__dep_solve(list(added_nevras))
def __locateDeps(self, pkgs):
    """Map each package to a dict of its requirements and their providers.

    Providers come from the libsolv pool; progress output is suppressed
    entirely when self.quiet is set.
    """
    dep_map = {}
    if not self.quiet:
        print("Solving Dependencies (%i): " % len(pkgs))
        bar = ProgressBar(prompt='', endTag=' - complete',
                          finalSize=len(pkgs), finalBarLength=40,
                          stream=sys.stdout)
        bar.printAll(1)
    for pkg in pkgs:
        if not self.quiet:
            bar.addTo(1)
            bar.printIncrement()
        # resolve every requirement through the libsolv pool
        per_pkg = {}
        for req in pkg.lookup_deparray(solv.SOLVABLE_REQUIRES):
            per_pkg[req] = self.pool.whatprovides(req)
        dep_map[pkg] = per_pkg
    if not self.quiet:
        bar.printComplete()
    return dep_map
def process_sha256_packages():
    """Relocate SHA256-capable packages to checksum-based paths and update the DB.

    For every package returned by _get_sha256_packages_query: recompute the
    file checksum, hard-link the rpm to its new checksum-based path under
    CFG.MOUNT_POINT, update the package row and its per-file checksums,
    commit, then remove the old path.  Exits the process on filesystem
    errors.  NOTE(review): `log` is only bound when `debug` is true; every
    use below is guarded by `debug`.
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print("No SHA256 capable packages to process.")
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print("Processing %s SHA256 capable packages" % len(packages))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()
        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)
        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        # temp_file's position after load_package determines what is hashed
        # — presumably the whole file; TODO confirm against getFileChecksum.
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        # path layout appears to be <prefix>/<org_id>/... — org id taken
        # from the second path component
        org_id = old_path[1]

        new_path = get_package_path(nevra, org_id, prepend=old_path[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer"
                                     % (old_abs_path, new_abs_path))
                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)
                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)
                # Make the new path readable
                os.chmod(new_abs_path, int('0644', 8))
        except OSError:
            # sys.exc_info() spelling keeps this parseable on python 2 and 3
            e = sys.exc_info()[1]
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id,
                                              csum=csum, pid=package['id'],
                                              filename=f)

        # commit per package so the old file is only removed once the DB
        # points at the new path
        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

    pb.printComplete()
def process_sha256_packages():
    """Relocate SHA256-capable packages to checksum-based paths and update the DB.

    For every package returned by _get_sha256_packages_query: recompute the
    file checksum, hard-link the rpm to its new checksum-based path under
    CFG.MOUNT_POINT, update the package row and its per-file checksums,
    commit, then remove the old path.  Exits the process on filesystem
    errors.  `log` is only bound when `debug` is true; every use below is
    guarded by `debug`.
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        # print() with one argument behaves identically on python 2 and 3
        print("No SHA256 capable packages to process.")
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print("Processing %s SHA256 capable packages" % len(packages))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()
        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)
        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]

        new_path = get_package_path(nevra, org_id, prepend=old_path[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer"
                                     % (old_abs_path, new_abs_path))
                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)
                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)
                # Make the new path readable (0o644 is the py2.6+/py3
                # compatible spelling of the old 0644 literal)
                os.chmod(new_abs_path, 0o644)
        except OSError as e:
            # "as e" replaces the python-2-only "except OSError, e" syntax
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id,
                                              csum=csum, pid=package['id'],
                                              filename=f)

        # commit per package so the old file is only removed once the DB
        # points at the new path
        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError as e:
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)
# NOTE(review): fragment of a changelog-fixing function (its `def` is not
# visible in this chunk) — it reads the non-ASCII row count, then iterates
# the offending rhnpackagechangelogdata rows with a progress bar.
nrows = query_count.fetchall_dict()[0]['cnt']
query = rhnSQL.prepare(_non_ascii_changelog_data)
query.execute()
if nrows == 0:
    msg = "No non-ASCII changelog entries to process."
    print msg
    if debug:
        log.writeMessage(msg)
    return
if verbose:
    print "Processing %s non-ASCII changelog entries" % nrows
pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                 finalSize=nrows, finalBarLength=40, stream=sys.stdout)
pb.printAll(1)
update_name = rhnSQL.prepare(_update_changelog_data_name)
update_text = rhnSQL.prepare(_update_changelog_data_text)
while (True):
    row = query.fetchone_dict()
    if not row:
        # No more packages in DB to process
        break
    pb.addTo(1)
    pb.printIncrement()
    # python-2 str.decode: drop undecodable bytes, keep the rest
    name_u = row['name'].decode('utf8', 'ignore')
    name_fixed = name_u
def process_package_data():
    """Move packages to their checksum-based paths and update DB records.

    For every row from _get_path_query: parse the rpm filename, locate the
    file on disk (falling back to the already-final path or the known-bad
    epoch-less path), move it to its new location, update rhnPackage and the
    gpg key associations.  Commits every 1000 packages.  Exits the process
    if a package header cannot be read.  `log` is only bound when `debug`
    is true; uses are guarded accordingly.
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_path_sql = rhnSQL.prepare(_get_path_query)
    _update_package_path = rhnSQL.prepare(_update_pkg_path_query)

    _get_path_sql.execute()
    paths = _get_path_sql.fetchall_dict()

    if not paths:
        # Nothing to change
        return
    if verbose:
        # print() with one argument is identical on python 2 and 3
        print("Processing %s packages" % len(paths))
    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(paths), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    skip_list = []
    new_ok_list = []
    i = 0
    for path in paths:
        pb.addTo(1)
        pb.printIncrement()
        old_path_nvrea = path['path'].split('/')
        org_id = old_path_nvrea[1]
        # pylint: disable=W0703
        try:
            nevra = parseRPMFilename(old_path_nvrea[-1])
            if nevra[1] in [None, '']:
                nevra[1] = path['epoch']
        except Exception:
            # probably not an rpm skip
            if debug:
                log.writeMessage("Skipping: %s Not a valid rpm"
                                 % old_path_nvrea[-1])
            continue
        old_abs_path = os.path.join(CFG.MOUNT_POINT, path['path'])
        checksum_type = path['checksum_type']
        checksum = path['checksum']
        new_path = get_package_path(nevra, org_id, prepend=old_path_nvrea[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)
        # path a buggy older version would have produced (epoch omitted)
        bad_abs_path = os.path.join(CFG.MOUNT_POINT,
                                    get_package_path(nevra, org_id,
                                                     prepend=old_path_nvrea[0],
                                                     omit_epoch=True,
                                                     checksum=checksum))

        if not os.path.exists(old_abs_path):
            if os.path.exists(new_abs_path):
                new_ok_list.append(new_abs_path)
                if debug:
                    log.writeMessage("File %s already on final path %s"
                                     % (path['path'], new_abs_path))
                old_abs_path = new_abs_path
            elif os.path.exists(bad_abs_path):
                log.writeMessage("File %s found on %s"
                                 % (path['path'], bad_abs_path))
                old_abs_path = bad_abs_path
            else:
                skip_list.append(old_abs_path)
                if debug:
                    log.writeMessage("Missing path %s for package %d"
                                     % (old_abs_path, path['id']))
                continue

        # pylint: disable=W0703
        try:
            hdr = rhn_rpm.get_package_header(filename=old_abs_path)
        except Exception as e:
            # "as e" replaces the python-2-only "except Exception, e" syntax
            msg = "Exception occurred when reading package header %s: %s" % \
                (old_abs_path, str(e))
            print(msg)
            if debug:
                log.writeMessage(msg)
            rhnSQL.commit()
            sys.exit(1)

        if old_abs_path != new_abs_path:
            new_abs_dir = os.path.dirname(new_abs_path)
            # relocate the package on the filer
            if debug:
                log.writeMessage("Relocating %s to %s on filer"
                                 % (old_abs_path, new_abs_path))
            if not os.path.isdir(new_abs_dir):
                os.makedirs(new_abs_dir)
            shutil.move(old_abs_path, new_abs_path)
            # Clean up left overs
            os.removedirs(os.path.dirname(old_abs_path))
            # make the path readable (0o644 is the py2.6+/py3 compatible
            # spelling of the old 0644 literal)
            os.chmod(new_abs_path, 0o644)

        # Update the db paths
        _update_package_path.execute(the_id=path['id'], new_path=new_path)
        if debug:
            log.writeMessage("query Executed: update rhnPackage %d to %s"
                             % (path['id'], new_path))
        # Process gpg key ids
        server_packages.processPackageKeyAssociations(hdr, checksum_type,
                                                      checksum)
        if debug:
            log.writeMessage("gpg key info updated from %s" % new_abs_path)
        i = i + 1
        # we need to break the transaction to smaller pieces
        if i % 1000 == 0:
            rhnSQL.commit()
# NOTE(review): fragment of a changelog-fixing function (its `def` is not
# visible in this chunk) — it reads the non-ASCII row count, then iterates
# the offending rhnpackagechangelogdata rows with a progress bar.
nrows = query_count.fetchall_dict()[0]['cnt']
query = rhnSQL.prepare(_non_ascii_changelog_data)
query.execute()
if nrows == 0:
    msg = "No non-ASCII changelog entries to process."
    print msg
    if debug:
        log.writeMessage(msg)
    return
if verbose:
    print "Processing %s non-ASCII changelog entries" % nrows
pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                 finalSize=nrows, finalBarLength=40, stream=sys.stdout)
pb.printAll(1)
update_name = rhnSQL.prepare(_update_changelog_data_name)
update_text = rhnSQL.prepare(_update_changelog_data_text)
while (True):
    row = query.fetchone_dict()
    if not row:
        # No more packages in DB to process
        break
    pb.addTo(1)
    pb.printIncrement()
    # python-2 str.decode: drop undecodable bytes, keep the rest
    name_u = row['name'].decode('utf8', 'ignore')
    name_fixed = name_u
def process_changelog():
    """Repair mis-encoded (non-UTF-8) changelog name/text entries in the DB.

    Finds rhnpackagechangelogdata rows whose character length differs from
    their byte length (i.e. non-ASCII content), re-decodes each field, and
    writes back the fixed value, committing per row.  NOTE(review): `log`
    is only bound when `debug` is true; uses below are guarded accordingly.
    """
    def convert(u):
        # Repeatedly undo a latin-1/utf-8 mojibake round-trip until the
        # string stops changing; trims a trailing byte when utf-8 decoding
        # fails with 'unexpected end of data'.
        last = ''
        while u != last:
            last = u
            try:
                u = last.encode('iso8859-1').decode('utf8')
            except (UnicodeDecodeError, UnicodeEncodeError):
                e = sys.exc_info()[1]
                if e.reason == 'unexpected end of data':
                    u = u[:-1]
                    continue
                else:
                    break
        return u

    # lengthb() exists on Oracle; postgres uses octet_length()
    if CFG.db_backend == 'postgresql':
        lengthb = "octet_length(%s)"
    else:
        lengthb = "lengthb(%s)"
    _non_ascii_changelog_data_count = """select count(*) as cnt
          from rhnpackagechangelogdata
         where length(name) <> %s
            or length(text) <> %s
    """ % (lengthb % 'name', lengthb % 'text')
    _non_ascii_changelog_data = """select *
          from rhnpackagechangelogdata
         where length(name) <> %s
            or length(text) <> %s
    """ % (lengthb % 'name', lengthb % 'text')
    _update_changelog_data_name = """update rhnpackagechangelogdata
                                        set name = :name
                                      where id = :id"""
    _update_changelog_data_text = """update rhnpackagechangelogdata
                                        set text = :text
                                      where id = :id"""

    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    query_count = rhnSQL.prepare(_non_ascii_changelog_data_count)
    query_count.execute()
    nrows = query_count.fetchall_dict()[0]['cnt']

    query = rhnSQL.prepare(_non_ascii_changelog_data)
    query.execute()

    if nrows == 0:
        msg = "No non-ASCII changelog entries to process."
        print(msg)
        if debug:
            log.writeMessage(msg)
        return

    if verbose:
        print(("Processing %s non-ASCII changelog entries" % nrows))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=nrows, finalBarLength=40, stream=sys.stdout)
    pb.printAll(1)

    update_name = rhnSQL.prepare(_update_changelog_data_name)
    update_text = rhnSQL.prepare(_update_changelog_data_text)

    while (True):
        row = query.fetchone_dict()
        if not row:
            # No more packages in DB to process
            break
        pb.addTo(1)
        pb.printIncrement()
        name_u = row['name'].decode('utf8', 'ignore')
        name_fixed = name_u
        # length mismatch means some bytes were dropped by 'ignore'
        if len(row['name']) != len(name_u):
            name_fixed = convert(name_u)
        if name_fixed != name_u:
            if debug and verbose:
                log.writeMessage("Fixing record %s: name: '%s'"
                                 % (row['id'], row['name']))
            update_name.execute(id=row['id'], name=name_fixed)
        text_u = row['text'].decode('utf8', 'ignore')
        text_fixed = text_u
        if len(row['text']) != len(text_u):
            text_fixed = convert(text_u)
        if text_fixed != text_u:
            if debug and verbose:
                log.writeMessage("Fixing record %s: text: '%s'"
                                 % (row['id'], row['text']))
            update_text.execute(id=row['id'], text=text_fixed)
        rhnSQL.commit()

    pb.printComplete()
def process_package_data():
    """Move packages to their checksum-based paths and update DB records.

    For every row from _get_path_query: parse the rpm filename, locate the
    file on disk (falling back to the already-final path or the known-bad
    epoch-less path), move it to its new location, update rhnPackage and the
    gpg key associations.  Commits every 1000 packages and once at the end.
    Exits the process if a package header cannot be read.  NOTE(review):
    `log` is only bound when `debug` is true; the `elif bad_abs_path` branch
    calls it unguarded — presumably only reached with debug on; verify.
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_path_sql = rhnSQL.prepare(_get_path_query)
    _update_package_path = rhnSQL.prepare(_update_pkg_path_query)

    _get_path_sql.execute()
    paths = _get_path_sql.fetchall_dict()

    if not paths:
        # Nothing to change
        return
    if verbose:
        print("Processing %s packages" % len(paths))
    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(paths), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    skip_list = []
    new_ok_list = []
    i = 0
    for path in paths:
        pb.addTo(1)
        pb.printIncrement()
        old_path_nvrea = path['path'].split('/')
        # path layout appears to be <prefix>/<org_id>/...
        org_id = old_path_nvrea[1]
        # pylint: disable=W0703
        try:
            nevra = parseRPMFilename(old_path_nvrea[-1])
            if nevra[1] in [None, '']:
                nevra[1] = path['epoch']
        except Exception:
            # probably not an rpm skip
            if debug:
                log.writeMessage("Skipping: %s Not a valid rpm"
                                 % old_path_nvrea[-1])
            continue
        old_abs_path = os.path.join(CFG.MOUNT_POINT, path['path'])
        checksum_type = path['checksum_type']
        checksum = path['checksum']
        new_path = get_package_path(nevra, org_id, prepend=old_path_nvrea[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)
        # path an epoch-omitting bug would have produced
        bad_abs_path = os.path.join(CFG.MOUNT_POINT,
                                    get_package_path(nevra, org_id,
                                                     prepend=old_path_nvrea[0],
                                                     omit_epoch=True,
                                                     checksum=checksum))

        if not os.path.exists(old_abs_path):
            if os.path.exists(new_abs_path):
                new_ok_list.append(new_abs_path)
                if debug:
                    log.writeMessage("File %s already on final path %s"
                                     % (path['path'], new_abs_path))
                old_abs_path = new_abs_path
            elif os.path.exists(bad_abs_path):
                log.writeMessage("File %s found on %s"
                                 % (path['path'], bad_abs_path))
                old_abs_path = bad_abs_path
            else:
                skip_list.append(old_abs_path)
                if debug:
                    log.writeMessage("Missing path %s for package %d"
                                     % (old_abs_path, path['id']))
                continue

        # pylint: disable=W0703
        try:
            hdr = rhn_rpm.get_package_header(filename=old_abs_path)
        except Exception:
            e = sys.exc_info()[1]
            msg = "Exception occurred when reading package header %s: %s" % \
                (old_abs_path, str(e))
            print(msg)
            if debug:
                log.writeMessage(msg)
            rhnSQL.commit()
            sys.exit(1)

        if old_abs_path != new_abs_path:
            new_abs_dir = os.path.dirname(new_abs_path)
            # relocate the package on the filer
            if debug:
                log.writeMessage("Relocating %s to %s on filer"
                                 % (old_abs_path, new_abs_path))
            if not os.path.isdir(new_abs_dir):
                os.makedirs(new_abs_dir)
            shutil.move(old_abs_path, new_abs_path)
            # Clean up left overs
            os.removedirs(os.path.dirname(old_abs_path))
            # make the path readable
            os.chmod(new_abs_path, int('0644', 8))

        # Update the db paths
        _update_package_path.execute(the_id=path['id'], new_path=new_path)
        if debug:
            log.writeMessage("query Executed: update rhnPackage %d to %s"
                             % (path['id'], new_path))
        # Process gpg key ids
        server_packages.processPackageKeyAssociations(hdr, checksum_type,
                                                      checksum)
        if debug:
            log.writeMessage("gpg key info updated from %s" % new_abs_path)
        i = i + 1
        # we need to break the transaction to smaller pieces
        if i % 1000 == 0:
            rhnSQL.commit()
    pb.printComplete()
    # All done, final commit
    rhnSQL.commit()
    sys.stderr.write("Transaction Committed! \n")
    if verbose:
        print(" Skipping %s packages, paths not found" % len(skip_list))
    if len(new_ok_list) > 0 and verbose:
        print(" There were %s packages found in the correct location"
              % len(new_ok_list))
    return
def process_changelog():
    """Repair mis-encoded (non-UTF-8) changelog name/text entries in the DB.

    Finds rhnpackagechangelogdata rows whose character length differs from
    their byte length (i.e. non-ASCII content), re-decodes each field, and
    writes back the fixed value, committing per row.  NOTE(review): `log`
    is only bound when `debug` is true; uses below are guarded accordingly.
    """
    def convert(u):
        # Repeatedly undo a latin-1/utf-8 mojibake round-trip until the
        # string stops changing; trims a trailing byte when utf-8 decoding
        # fails with 'unexpected end of data'.
        last = ''
        while u != last:
            last = u
            try:
                u = last.encode('iso8859-1').decode('utf8')
            except (UnicodeDecodeError, UnicodeEncodeError):
                e = sys.exc_info()[1]
                if e.reason == 'unexpected end of data':
                    u = u[:-1]
                    continue
                else:
                    break
        return u

    # lengthb() exists on Oracle; postgres uses octet_length()
    if CFG.db_backend == 'postgresql':
        lengthb = "octet_length(%s)"
    else:
        lengthb = "lengthb(%s)"
    _non_ascii_changelog_data_count = """select count(*) as cnt
          from rhnpackagechangelogdata
         where length(name) <> %s
            or length(text) <> %s
    """ % (lengthb % 'name', lengthb % 'text')
    _non_ascii_changelog_data = """select *
          from rhnpackagechangelogdata
         where length(name) <> %s
            or length(text) <> %s
    """ % (lengthb % 'name', lengthb % 'text')
    _update_changelog_data_name = """update rhnpackagechangelogdata
                                        set name = :name
                                      where id = :id"""
    _update_changelog_data_text = """update rhnpackagechangelogdata
                                        set text = :text
                                      where id = :id"""

    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    query_count = rhnSQL.prepare(_non_ascii_changelog_data_count)
    query_count.execute()
    nrows = query_count.fetchall_dict()[0]['cnt']

    query = rhnSQL.prepare(_non_ascii_changelog_data)
    query.execute()

    if nrows == 0:
        msg = "No non-ASCII changelog entries to process."
        print(msg)
        if debug:
            log.writeMessage(msg)
        return

    if verbose:
        print("Processing %s non-ASCII changelog entries" % nrows)

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=nrows, finalBarLength=40, stream=sys.stdout)
    pb.printAll(1)

    update_name = rhnSQL.prepare(_update_changelog_data_name)
    update_text = rhnSQL.prepare(_update_changelog_data_text)

    while (True):
        row = query.fetchone_dict()
        if not row:
            # No more packages in DB to process
            break
        pb.addTo(1)
        pb.printIncrement()
        name_u = row['name'].decode('utf8', 'ignore')
        name_fixed = name_u
        # length mismatch means some bytes were dropped by 'ignore'
        if len(row['name']) != len(name_u):
            name_fixed = convert(name_u)
        if name_fixed != name_u:
            if debug and verbose:
                log.writeMessage("Fixing record %s: name: '%s'"
                                 % (row['id'], row['name']))
            update_name.execute(id=row['id'], name=name_fixed)
        text_u = row['text'].decode('utf8', 'ignore')
        text_fixed = text_u
        if len(row['text']) != len(text_u):
            text_fixed = convert(text_u)
        if text_fixed != text_u:
            if debug and verbose:
                log.writeMessage("Fixing record %s: text: '%s'"
                                 % (row['id'], row['text']))
            update_text.execute(id=row['id'], text=text_fixed)
        rhnSQL.commit()

    pb.printComplete()