def _uploadPackage(self, channels, org_id, force, info):
    """ Write the bits to a temporary file """
    packageBits = info['package']
    package_stream = tempfile.TemporaryFile()
    package_stream.write(packageBits)
    package_stream.seek(0, 0)
    del packageBits

    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(package_stream)
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(checksum_type, file=payload_stream)
    relative_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=org_id, checksum=checksum,
        checksum_type=checksum_type)

    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, checksum_type, checksum, org_id=org_id,
        force=force, header_start=header_start, header_end=header_end,
        relative_path=relative_path)

    if diff_level:
        return package_dict, diff_level
    return 0
def upload_package(self, package, path):
    temp_file = open(path, 'rb')
    # Parse the RPM header and leave the stream positioned at the payload.
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(temp_file)
    # Record the checksum declared by the header and compute it over the file.
    package.checksum_type = header.checksum_type()
    package.checksum = getFileChecksum(package.checksum_type, file=temp_file)
    # Derive the checksum-based relative path and push the package.
    rel_package_path = rhnPackageUpload.relative_path_from_header(
        header, self.channel['org_id'], package.checksum_type,
        package.checksum)
    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, package.checksum_type, package.checksum,
        force=False, header_start=header_start, header_end=header_end,
        relative_path=rel_package_path, org_id=self.channel['org_id'])
    temp_file.close()
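# The helpers above (and the handler below) repeat the same rhnPackageUpload
# call sequence. The function below is a minimal, hypothetical sketch of that
# sequence for a package already on disk; it is illustrative only, mirrors the
# calls shown in this module, and assumes the same Spacewalk imports
# (rhnPackageUpload, getFileChecksum) are available. _push_rpm_sketch is not
# part of the original code.
def _push_rpm_sketch(path, org_id, force=False):
    stream = open(path, 'rb')
    # Parse the RPM header; the returned stream is positioned at the payload.
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(stream)
    # Checksum the payload with whatever digest type the header declares.
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(checksum_type, file=payload_stream)
    # Derive the checksum-based relative path and push the package.
    relative_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=org_id, checksum_type=checksum_type,
        checksum=checksum)
    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, checksum_type, checksum, org_id=org_id,
        force=force, header_start=header_start, header_end=header_end,
        relative_path=relative_path)
    stream.close()
    return package_dict, diff_level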
def handler(self, req):
    ret = basePackageUpload.BasePackageUpload.handler(self, req)
    if ret != apache.OK:
        return ret

    temp_stream = rhnPackageUpload.write_temp_file(req, 16384, self.packaging)
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(temp_stream)
    # Sanity check - removed, the package path can no longer be determined
    # without the header
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(checksum_type, file=payload_stream)
    self.rel_package_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=self.org_id, checksum_type=checksum_type,
        checksum=checksum)
    self.package_path = os.path.join(CFG.MOUNT_POINT, self.rel_package_path)

    # Verify the checksum of the bytes we downloaded against the checksum
    # presented by rhnpush in the HTTP headers
    if not (checksum_type == self.file_checksum_type
            and checksum == self.file_checksum):
        log_debug(1, "Mismatching checksums: expected",
                  self.file_checksum_type, self.file_checksum,
                  "; got:", checksum_type, checksum)
        raise rhnFault(104, "Mismatching information")

    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, checksum_type, checksum,
        force=self.force, header_start=header_start, header_end=header_end,
        relative_path=self.rel_package_path, org_id=self.org_id)

    if diff_level:
        return self._send_package_diff(req, diff_level, package_dict)

    # Everything went fine
    rhnSQL.commit()
    reply = "All OK"
    req.headers_out['Content-Length'] = str(len(reply))
    req.send_http_header()
    req.write(reply)
    log_debug(2, "Returning with OK")
    return apache.OK
def process_sha256_packages():
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print "No SHA256 capable packages to process."
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print "Processing %s SHA256 capable packages" % len(packages)

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()

        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)

        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]
        new_path = get_package_path(nevra, org_id, prepend=old_path[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer"
                                     % (old_abs_path, new_abs_path))

                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)

                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)

                # Make the new path readable
                os.chmod(new_abs_path, 0644)
        except OSError, e:
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print message
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id,
                                              csum=csum, pid=package['id'],
                                              filename=f)

        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError, e:
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print message
            if debug:
                log.writeMessage(message)
            sys.exit(1)
def process_sha256_packages():
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print("No SHA256 capable packages to process.")
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print("Processing %s SHA256 capable packages" % len(packages))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()

        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)

        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]
        new_path = get_package_path(nevra, org_id, prepend=old_path[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer"
                                     % (old_abs_path, new_abs_path))

                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)

                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)

                # Make the new path readable
                os.chmod(new_abs_path, int('0644', 8))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id,
                                              csum=csum, pid=package['id'],
                                              filename=f)

        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

    pb.printComplete()
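# The functions above lean on the Spacewalk getFileChecksum helper. For
# reference only, a rough hashlib-based equivalent is sketched below. It
# assumes getFileChecksum streams the given file object and returns the hex
# digest for the named algorithm; _file_checksum_sketch is a hypothetical
# stand-in, not the Spacewalk implementation.
import hashlib

def _file_checksum_sketch(checksum_type, file_obj, chunk_size=65536):
    # Map the header's digest name (e.g. 'md5', 'sha256') onto hashlib and
    # hash the file object in fixed-size chunks.
    digest = hashlib.new(checksum_type)
    while True:
        chunk = file_obj.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)
    return digest.hexdigest()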