def _import_signatures(self):
    """Record package key (signature) associations for every package in
    the batch.

    Entries without a path and mpm packages carry no header on disk, so
    they are skipped; so is any package whose file is missing from the
    mount point.
    """
    for pkg in self.batch:
        # skip missing files and mpm packages
        if not pkg['path'] or isinstance(pkg, mpmBinaryPackage):
            continue
        pkg_file = os.path.join(CFG.MOUNT_POINT, pkg['path'])
        if not os.path.exists(pkg_file):
            continue
        hdr = rhn_pkg.get_package_header(filename=pkg_file)
        server_packages.processPackageKeyAssociations(
            hdr, pkg['checksum_type'], pkg['checksum'])
def _import_signatures(self):
    """Associate key (signature) information for each package in the batch.

    Only packages that have a path, are not mpm packages, and whose file
    exists under CFG.MOUNT_POINT are processed.
    """
    def _has_header_on_disk(p):
        # skip missing files and mpm packages
        return p['path'] and not isinstance(p, mpmBinaryPackage)

    for package in filter(_has_header_on_disk, self.batch):
        abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if os.path.exists(abs_path):
            server_packages.processPackageKeyAssociations(
                rhn_pkg.get_package_header(filename=abs_path),
                package['checksum_type'],
                package['checksum'])
def process_package_data():
    """Migrate package files to their canonical, checksum-based paths.

    For every row returned by _get_path_query, compute the new package
    path with get_package_path(), move the file on the filer when it is
    not already there, and update the rhnPackage row to the new relative
    path.  GPG key associations are refreshed from the on-disk header.

    Relies on module globals: the debug/verbose flags, the prepared query
    strings, rhnSQL and CFG.MOUNT_POINT.  Commits every 1000 packages to
    keep transactions small; commits and exits the process when a package
    header cannot be read.  (Python 2 code: print statement, old except
    syntax, 0644 octal literal.)
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)
    _get_path_sql = rhnSQL.prepare(_get_path_query)
    _update_package_path = rhnSQL.prepare(_update_pkg_path_query)
    _get_path_sql.execute()
    paths = _get_path_sql.fetchall_dict()
    if not paths:
        # Nothing to change
        return
    if verbose:
        print "Processing %s packages" % len(paths)
    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!', \
                     finalSize=len(paths), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    skip_list = []      # files not found at any candidate location
    new_ok_list = []    # files already sitting at their final path
    i = 0
    for path in paths:
        pb.addTo(1)
        pb.printIncrement()
        # the second component of the stored relative path is used as the
        # org id; the last component is the rpm file name
        old_path_nvrea = path['path'].split('/')
        org_id = old_path_nvrea[1]
        # pylint: disable=W0703
        try:
            nevra = parseRPMFilename(old_path_nvrea[-1])
            if nevra[1] in [None, '']:
                # no epoch in the file name -- fall back to the DB value
                nevra[1] = path['epoch']
        except Exception:
            # probably not an rpm skip
            if debug:
                log.writeMessage("Skipping: %s Not a valid rpm" \
                                 % old_path_nvrea[-1])
            continue
        old_abs_path = os.path.join(CFG.MOUNT_POINT, path['path'])
        checksum_type = path['checksum_type']
        checksum = path['checksum']
        new_path = get_package_path(nevra, org_id, prepend=old_path_nvrea[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)
        # same target path computed with the epoch omitted -- a location
        # files may have been written to previously (omit_epoch=True)
        bad_abs_path = os.path.join(CFG.MOUNT_POINT, \
                           get_package_path(nevra, org_id,
                                            prepend=old_path_nvrea[0],
                                            omit_epoch = True,
                                            checksum=checksum))
        if not os.path.exists(old_abs_path):
            if os.path.exists(new_abs_path):
                new_ok_list.append(new_abs_path)
                if debug:
                    log.writeMessage("File %s already on final path %s"
                                     % (path['path'], new_abs_path))
                old_abs_path = new_abs_path
            elif os.path.exists(bad_abs_path):
                # NOTE(review): 'log' is only bound when debug is set, but
                # this call is unguarded -- NameError when debug is off.
                # Confirm and guard with 'if debug:' like the other sites.
                log.writeMessage("File %s found on %s"
                                 % (path['path'], bad_abs_path))
                old_abs_path = bad_abs_path
            else:
                skip_list.append(old_abs_path)
                if debug:
                    log.writeMessage("Missing path %s for package %d"
                                     % (old_abs_path, path['id']))
                continue
        # pylint: disable=W0703
        try:
            hdr = rhn_rpm.get_package_header(filename=old_abs_path)
        except Exception, e:
            msg = "Exception occurred when reading package header %s: %s" % \
                  (old_abs_path, str(e))
            print msg
            if debug:
                log.writeMessage(msg)
            # persist what has been migrated so far before bailing out
            rhnSQL.commit()
            sys.exit(1)
        if old_abs_path != new_abs_path:
            new_abs_dir = os.path.dirname(new_abs_path)
            # relocate the package on the filer
            if debug:
                log.writeMessage("Relocating %s to %s on filer" \
                                 % (old_abs_path, new_abs_path))
            if not os.path.isdir(new_abs_dir):
                os.makedirs(new_abs_dir)
            shutil.move(old_abs_path, new_abs_path)
            # Clean up left overs
            os.removedirs(os.path.dirname(old_abs_path))
            # make the path readable
            os.chmod(new_abs_path, 0644)
        # Update the db paths
        _update_package_path.execute(the_id= path['id'], \
                                     new_path = new_path )
        if debug:
            log.writeMessage("query Executed: update rhnPackage %d to %s" \
                             % ( path['id'], new_path ))
        # Process gpg key ids
        server_packages.processPackageKeyAssociations(hdr, checksum_type,
                                                      checksum)
        if debug:
            log.writeMessage("gpg key info updated from %s" % new_abs_path)
        i = i + 1
        # we need to break the transaction to smaller pieces
        if i % 1000 == 0:
            rhnSQL.commit()
def push_package(a_pkg, org_id=None, force=None, channels=[], relative_path=None):
    """Uploads a package

    Writes the payload to its final location under CFG.MOUNT_POINT, runs
    the single-package batch through packageImport.packageImporter and
    reconciles the stored database path with the file system.

    :param a_pkg: incoming package object; its payload stream, header,
        checksums and header offsets are read
    :param org_id: owning organization id, or None for a null-org package
    :param force: truthy forces the import (upload_force=4) and skips the
        diff-based bailout
    :param channels: channel labels to associate (read-only here; note
        the mutable default -- never mutate it)
    :param relative_path: destination path relative to CFG.MOUNT_POINT
    :return: ({}, 0) on success, or (package diff dict, diff level) when
        the incoming package differs too much from the imported one
    :raises rhnFault: fault 50 when the file cannot be written
    """
    # First write the package to the filesystem to final location
    try:
        importLib.move_package(a_pkg.payload_stream.name,
                               basedir=CFG.MOUNT_POINT,
                               relpath=relative_path,
                               checksum_type=a_pkg.checksum_type,
                               checksum=a_pkg.checksum, force=1)
    except OSError:
        e = sys.exc_info()[1]
        raise_with_tb(rhnFault(50, "Package upload failed: %s" % e),
                      sys.exc_info()[2])
    except importLib.FileConflictError:
        raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
    except:
        # deliberately broad: anything else becomes a generic file error
        raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])

    pkg = mpmSource.create_package(a_pkg.header, size=a_pkg.payload_size,
                                   checksum_type=a_pkg.checksum_type,
                                   checksum=a_pkg.checksum,
                                   relpath=relative_path, org_id=org_id,
                                   header_start=a_pkg.header_start,
                                   header_end=a_pkg.header_end,
                                   channels=channels)

    batch = importLib.Collection()
    batch.append(pkg)

    backend = SQLBackend()

    # upload_force=4 overrides an existing package; 0 is a plain import
    if force:
        upload_force = 4
    else:
        upload_force = 0
    importer = packageImport.packageImporter(batch, backend,
                                             source=a_pkg.header.is_source,
                                             caller="server.app.uploadPackage")

    importer.setUploadForce(upload_force)
    importer.run()

    package = batch[0]
    log_debug(5, "Package diff", package.diff)

    if package.diff and not force and package.diff.level > 1:
        # Packages too different; bail out
        log_debug(1, "Packages too different", package.toDict(),
                  "Level:", package.diff.level)
        pdict = package.toDict()
        orig_path = package['path']
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(4, "Original package", orig_path)

        # MPMs do not store their headers on disk, so we must avoid performing
        # operations which rely on information only contained in the headers
        # (such as header signatures).
        if os.path.exists(orig_path) and a_pkg.header.packaging != 'mpm':
            oh = rhn_pkg.get_package_header(orig_path)
            _diff_header_sigs(a_pkg.header, oh, pdict['diff']['diff'])

        return pdict, package.diff.level

    # Remove any pending scheduled file deletion for this package
    h = rhnSQL.prepare("""
        delete from rhnPackageFileDeleteQueue where path = :path
    """)
    h.execute(path=relative_path)

    if package.diff and not force and package.diff.level:
        # No need to copy it - just the path is modified
        # pkilambi bug#180347
        # case 1:check if the path exists in the db and also on the file system.
        # if it does then no need to copy
        # case2: file exists on file system but path not in db.then add the
        # realtive path in the db based on checksum of the pkg
        # case3: if no file on file system but path exists.then we write the
        # file to file system
        # case4:no file exists on FS and no path in db .then we write both.
        orig_path = package['path']
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(3, "Original package", orig_path)

        # check included to query for source and binary rpms
        h_path_sql = """
            select ps.path path
            from %s ps,
                 rhnChecksumView c
            where c.checksum = :csum
              and c.checksum_type = :ctype
              and ps.checksum_id = c.id
              and (ps.org_id = :org_id or
                   (ps.org_id is null and :org_id is null)
                  )
        """
        if a_pkg.header.is_source:
            h_package_table = 'rhnPackageSource'
        else:
            h_package_table = 'rhnPackage'
        h_path = rhnSQL.prepare(h_path_sql % h_package_table)
        h_path.execute(ctype=a_pkg.checksum_type, csum=a_pkg.checksum,
                       org_id=org_id)

        rs_path = h_path.fetchall_dict()
        path_dict = {}
        if rs_path:
            path_dict = rs_path[0]

        # BUGFIX: use .get() -- when the checksum lookup returns no rows,
        # path_dict stays empty and path_dict['path'] raised KeyError in
        # both branches below.  A missing row now falls through to the
        # path update (cases 2/4 above).
        if os.path.exists(orig_path) and path_dict.get('path'):
            # case 1: file on disk and path recorded in db -- nothing to do
            return {}, 0
        elif not path_dict.get('path'):
            # path missing in db -- record the relative path by checksum
            h_upd = rhnSQL.prepare("""
                update rhnpackage
                   set path = :path
                 where checksum_id = (
                       select id from rhnChecksumView c
                        where c.checksum = :csum
                          and c.checksum_type = :ctype)
            """)
            h_upd.execute(path=relative_path, ctype=a_pkg.checksum_type,
                          csum=a_pkg.checksum)

    # commit the transactions
    rhnSQL.commit()
    if not a_pkg.header.is_source:
        # Process Package Key information
        server_packages.processPackageKeyAssociations(a_pkg.header,
                                                      a_pkg.checksum_type,
                                                      a_pkg.checksum)

    if not a_pkg.header.is_source:
        errataCache.schedule_errata_cache_update(importer.affected_channels)

    log_debug(2, "Returning")
    return {}, 0
def process_package_data():
    """Migrate package files to their canonical, checksum-based paths.

    For every row returned by _get_path_query, compute the new package
    path with get_package_path(), move the file on the filer when it is
    not already there, and update the rhnPackage row to the new relative
    path.  GPG key associations are refreshed from the on-disk header.

    Relies on module globals: the debug/verbose flags, the prepared query
    strings, rhnSQL and CFG.MOUNT_POINT.  Commits every 1000 packages to
    keep transactions small; commits and exits the process when a package
    header cannot be read.
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)
    _get_path_sql = rhnSQL.prepare(_get_path_query)
    _update_package_path = rhnSQL.prepare(_update_pkg_path_query)
    _get_path_sql.execute()
    paths = _get_path_sql.fetchall_dict()
    if not paths:
        # Nothing to change
        return
    if verbose:
        print("Processing %s packages" % len(paths))
    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(paths), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)
    skip_list = []      # files not found at any candidate location
    new_ok_list = []    # files already sitting at their final path
    i = 0
    for path in paths:
        pb.addTo(1)
        pb.printIncrement()
        # the second component of the stored relative path is used as the
        # org id; the last component is the rpm file name
        old_path_nvrea = path['path'].split('/')
        org_id = old_path_nvrea[1]
        # pylint: disable=W0703
        try:
            nevra = parseRPMFilename(old_path_nvrea[-1])
            if nevra[1] in [None, '']:
                # no epoch in the file name -- fall back to the DB value
                nevra[1] = path['epoch']
        except Exception:
            # probably not an rpm skip
            if debug:
                log.writeMessage("Skipping: %s Not a valid rpm"
                                 % old_path_nvrea[-1])
            continue
        old_abs_path = os.path.join(CFG.MOUNT_POINT, path['path'])
        checksum_type = path['checksum_type']
        checksum = path['checksum']
        new_path = get_package_path(nevra, org_id, prepend=old_path_nvrea[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)
        # same target path computed with the epoch omitted -- a location
        # files may have been written to previously (omit_epoch=True)
        bad_abs_path = os.path.join(CFG.MOUNT_POINT,
                                    get_package_path(nevra, org_id,
                                                     prepend=old_path_nvrea[0],
                                                     omit_epoch=True,
                                                     checksum=checksum))
        if not os.path.exists(old_abs_path):
            if os.path.exists(new_abs_path):
                new_ok_list.append(new_abs_path)
                if debug:
                    log.writeMessage("File %s already on final path %s"
                                     % (path['path'], new_abs_path))
                old_abs_path = new_abs_path
            elif os.path.exists(bad_abs_path):
                # BUGFIX: 'log' is only bound when debug is set; the
                # original called log.writeMessage() unguarded here and
                # crashed with NameError on non-debug runs.
                if debug:
                    log.writeMessage("File %s found on %s"
                                     % (path['path'], bad_abs_path))
                old_abs_path = bad_abs_path
            else:
                skip_list.append(old_abs_path)
                if debug:
                    log.writeMessage("Missing path %s for package %d"
                                     % (old_abs_path, path['id']))
                continue
        # pylint: disable=W0703
        try:
            hdr = rhn_rpm.get_package_header(filename=old_abs_path)
        except Exception as e:
            msg = ("Exception occurred when reading package header %s: %s"
                   % (old_abs_path, str(e)))
            print(msg)
            if debug:
                log.writeMessage(msg)
            # persist what has been migrated so far before bailing out
            rhnSQL.commit()
            sys.exit(1)
        if old_abs_path != new_abs_path:
            new_abs_dir = os.path.dirname(new_abs_path)
            # relocate the package on the filer
            if debug:
                log.writeMessage("Relocating %s to %s on filer"
                                 % (old_abs_path, new_abs_path))
            if not os.path.isdir(new_abs_dir):
                os.makedirs(new_abs_dir)
            shutil.move(old_abs_path, new_abs_path)
            # Clean up left-over empty directories.  BUGFIX: removedirs()
            # raises OSError as soon as it reaches a directory that is not
            # empty (other packages may still live there); that is the
            # normal stop condition, not a fatal error, so swallow it
            # instead of aborting the whole migration.
            try:
                os.removedirs(os.path.dirname(old_abs_path))
            except OSError:
                pass
            # make the path readable (0644 octal literal, py3 syntax)
            os.chmod(new_abs_path, 0o644)
        # Update the db paths
        _update_package_path.execute(the_id=path['id'], new_path=new_path)
        if debug:
            log.writeMessage("query Executed: update rhnPackage %d to %s"
                             % (path['id'], new_path))
        # Process gpg key ids
        server_packages.processPackageKeyAssociations(hdr, checksum_type,
                                                      checksum)
        if debug:
            log.writeMessage("gpg key info updated from %s" % new_abs_path)
        i = i + 1
        # we need to break the transaction to smaller pieces
        if i % 1000 == 0:
            rhnSQL.commit()
    pb.printComplete()
    # All done, final commit
    rhnSQL.commit()
    sys.stderr.write("Transaction Committed! \n")
    if verbose:
        print(" Skipping %s packages, paths not found" % len(skip_list))
    if len(new_ok_list) > 0 and verbose:
        print(" There were %s packages found in the correct location"
              % len(new_ok_list))
    return
def push_package(a_pkg, org_id=None, force=None, channels=[], relative_path=None):
    """Uploads a package

    When relative_path is given, moves the payload into its final
    location under CFG.MOUNT_POINT and clears any pending scheduled file
    deletion for that path; then imports the package metadata through
    packageImport.packageImporter and reconciles the stored database path
    with the file system.

    :param a_pkg: incoming package object; its payload stream, header,
        checksums and header offsets are read
    :param org_id: owning organization id, or None for a null-org package
    :param force: truthy forces the import (upload_force=4) and skips the
        diff-based bailout
    :param channels: channel labels to associate (read-only here; note
        the mutable default -- never mutate it)
    :param relative_path: final path relative to CFG.MOUNT_POINT; when
        falsy, the file-system move and delete-queue cleanup are skipped
    :return: ({}, 0) on success, or (package diff dict, diff level) when
        the incoming package differs too much from the imported one
    :raises rhnFault: fault 50 when the file cannot be written
    """

    if relative_path:
        # First write the package to the filesystem to final location
        try:
            importLib.move_package(
                a_pkg.payload_stream.name,
                basedir=CFG.MOUNT_POINT,
                relpath=relative_path,
                checksum_type=a_pkg.checksum_type,
                checksum=a_pkg.checksum,
                force=1,
            )
        except OSError:
            e = sys.exc_info()[1]
            raise_with_tb(rhnFault(50, "Package upload failed: %s" % e), sys.exc_info()[2])
        except importLib.FileConflictError:
            raise_with_tb(rhnFault(50, "File already exists"), sys.exc_info()[2])
        except:
            # deliberately broad: anything else becomes a generic file error
            raise_with_tb(rhnFault(50, "File error"), sys.exc_info()[2])

        # Remove any pending scheduled file deletion for this package
        h = rhnSQL.prepare(
            """
            delete from rhnPackageFileDeleteQueue where path = :path
            """
        )
        h.execute(path=relative_path)

    pkg = mpmSource.create_package(
        a_pkg.header,
        size=a_pkg.payload_size,
        checksum_type=a_pkg.checksum_type,
        checksum=a_pkg.checksum,
        relpath=relative_path,
        org_id=org_id,
        header_start=a_pkg.header_start,
        header_end=a_pkg.header_end,
        channels=channels,
    )
    batch = importLib.Collection()
    batch.append(pkg)

    backend = SQLBackend()

    # upload_force=4 overrides an existing package; 0 is a plain import
    if force:
        upload_force = 4
    else:
        upload_force = 0

    importer = packageImport.packageImporter(
        batch, backend, source=a_pkg.header.is_source, caller="server.app.uploadPackage"
    )

    importer.setUploadForce(upload_force)
    importer.run()

    package = batch[0]
    log_debug(5, "Package diff", package.diff)

    if package.diff and not force and package.diff.level > 1:
        # Packages too different; bail out
        log_debug(1, "Packages too different", package.toDict(), "Level:", package.diff.level)
        pdict = package.toDict()
        orig_path = package["path"]
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(4, "Original package", orig_path)

        # MPMs do not store their headers on disk, so we must avoid performing
        # operations which rely on information only contained in the headers
        # (such as header signatures).
        if os.path.exists(orig_path) and a_pkg.header.packaging != "mpm":
            oh = rhn_pkg.get_package_header(orig_path)
            _diff_header_sigs(a_pkg.header, oh, pdict["diff"]["diff"])

        return pdict, package.diff.level

    if package.diff and not force and package.diff.level:
        # No need to copy it - just the path is modified
        # pkilambi bug#180347
        # case 1:check if the path exists in the db and also on the file system.
        # if it does then no need to copy
        # case2: file exists on file system but path not in db.then add the
        # realtive path in the db based on checksum of the pkg
        # case3: if no file on file system but path exists.then we write the
        # file to file system
        # case4:no file exists on FS and no path in db .then we write both.
        orig_path = package["path"]
        orig_path = os.path.join(CFG.MOUNT_POINT, orig_path)
        log_debug(3, "Original package", orig_path)

        # check included to query for source and binary rpms
        h_path_sql = """
            select ps.path path
            from %s ps,
                 rhnChecksumView c
            where c.checksum = :csum
              and c.checksum_type = :ctype
              and ps.checksum_id = c.id
              and (ps.org_id = :org_id or
                   (ps.org_id is null and :org_id is null)
                  )
        """
        if a_pkg.header.is_source:
            h_package_table = "rhnPackageSource"
        else:
            h_package_table = "rhnPackage"
        h_path = rhnSQL.prepare(h_path_sql % h_package_table)
        h_path.execute(ctype=a_pkg.checksum_type, csum=a_pkg.checksum, org_id=org_id)

        rs_path = h_path.fetchall_dict()
        path_dict = {}
        if rs_path:
            path_dict = rs_path[0]

        # BUGFIX: use .get() -- when the checksum lookup returns no rows,
        # path_dict stays empty and path_dict["path"] raised KeyError in
        # both branches below.  A missing row now falls through to the
        # path update (cases 2/4 above).
        if os.path.exists(orig_path) and path_dict.get("path"):
            # case 1: file on disk and path recorded in db -- nothing to do
            return {}, 0
        elif not path_dict.get("path"):
            # path missing in db -- record the relative path by checksum
            h_upd = rhnSQL.prepare(
                """
                update rhnpackage
                   set path = :path
                 where checksum_id = (
                       select id from rhnChecksumView c
                        where c.checksum = :csum
                          and c.checksum_type = :ctype)
                """
            )
            h_upd.execute(path=relative_path, ctype=a_pkg.checksum_type, csum=a_pkg.checksum)

    # commit the transactions
    rhnSQL.commit()

    if not a_pkg.header.is_source:
        # Process Package Key information
        server_packages.processPackageKeyAssociations(a_pkg.header, a_pkg.checksum_type, a_pkg.checksum)

    if not a_pkg.header.is_source:
        errataCache.schedule_errata_cache_update(importer.affected_channels)

    log_debug(2, "Returning")
    return {}, 0
elif not path_dict['path']: h_upd = rhnSQL.prepare(""" update rhnpackage set path = :path where checksum_id = ( select id from rhnChecksumView c where c.checksum = :csum and c.checksum_type = :ctype) """) h_upd.execute(path=relative_path, ctype=checksum_type, csum=checksum) # commit the transactions rhnSQL.commit() if not header.is_source: # Process Package Key information server_packages.processPackageKeyAssociations(header, checksum_type, checksum) if not header.is_source: errataCache.schedule_errata_cache_update(importer.affected_channels) log_debug(2, "Returning") return {}, 0 def _diff_header_sigs(h1, h2, diff_list): # XXX This can be far more complicated if we take into account that # signatures can be different h1sigs = h1.signatures h2sigs = h2.signatures if not h1sigs and not h2sigs: # No differences here return
h_upd = rhnSQL.prepare(""" update rhnpackage set path = :path where checksum_id = ( select id from rhnChecksumView c where c.checksum = :csum and c.checksum_type = :ctype) """) h_upd.execute(path=relative_path, ctype=a_pkg.checksum_type, csum=a_pkg.checksum) # commit the transactions rhnSQL.commit() if not a_pkg.header.is_source: # Process Package Key information server_packages.processPackageKeyAssociations(a_pkg.header, a_pkg.checksum_type, a_pkg.checksum) if not a_pkg.header.is_source: errataCache.schedule_errata_cache_update(importer.affected_channels) log_debug(2, "Returning") return {}, 0 def _diff_header_sigs(h1, h2, diff_list): # XXX This can be far more complicated if we take into account that # signatures can be different h1sigs = h1.signatures h2sigs = h2.signatures if not h1sigs and not h2sigs: # No differences here