def repomd_up_to_date(self):
    repomd_old_path = os.path.join(self.repo.basecachedir, self.name, "repomd.xml")
    # No cached repomd?
    if not os.path.isfile(repomd_old_path):
        return False
    repomd_new_path = os.path.join(self.repo.basecachedir, self.name, "repomd.xml.new")
    # Newer file not available? Don't do anything. It should be downloaded before this.
    if not os.path.isfile(repomd_new_path):
        return True
    return (checksum.getFileChecksum('sha256', filename=repomd_old_path) ==
            checksum.getFileChecksum('sha256', filename=repomd_new_path))
def __is_file_done(local_path=None, file_obj=None, checksum_type=None, checksum=None):
    if checksum_type and checksum:
        if local_path and os.path.isfile(local_path):
            return getFileChecksum(checksum_type, filename=local_path) == checksum
        elif file_obj:
            return getFileChecksum(checksum_type, file_obj=file_obj) == checksum
    if local_path and os.path.isfile(local_path):
        return True
    elif file_obj:
        return True
    return False
def copyFiles(options):
    """ copies SSL cert and GPG key to --pub-tree if not in there already
        existence check should have already been done.
    """

    pubDir = cleanupAbsPath(options.pub_tree or DEFAULT_APACHE_PUB_DIRECTORY)

    def copyFile(file0, file1):
        if not os.path.exists(os.path.dirname(file1)):
            sys.stderr.write("ERROR: directory does not exist:\n %s\n"
                             % os.path.dirname(file1))
            sys.exit(errnoBadPath)
        if not os.path.exists(file0):
            sys.stderr.write("ERROR: file does not exist:\n %s\n" % file0)
            sys.exit(errnoCANotFound)
        sys.stderr.write("""\
Copying file into public directory tree:
    %s to
    %s
""" % (file0, file1))
        shutil.copy(file0, file1)

    # CA SSL cert
    if not options.no_ssl and options.ssl_cert:
        writeYN = 1
        dest = os.path.join(pubDir, os.path.basename(options.ssl_cert))
        if os.path.dirname(options.ssl_cert) != pubDir:
            if os.path.isfile(dest) \
               and getFileChecksum('md5', options.ssl_cert) != getFileChecksum('md5', dest):
                rotateFile(dest, options.verbose)
            elif os.path.isfile(dest):
                writeYN = 0
            if writeYN:
                copyFile(options.ssl_cert, dest)

    # corp GPG keys
    if not options.no_gpg and options.gpg_key:
        for gpg_key in options.gpg_key.split(","):
            writeYN = 1
            dest = os.path.join(pubDir, os.path.basename(gpg_key))
            if os.path.dirname(gpg_key) != pubDir:
                if os.path.isfile(dest) \
                   and getFileChecksum('md5', gpg_key) != getFileChecksum('md5', dest):
                    rotateFile(dest, options.verbose)
                elif os.path.isfile(dest):
                    writeYN = 0
                if writeYN:
                    copyFile(gpg_key, dest)
def repomd_up_to_date(self):
    """
    Check if repomd.xml has been updated by spacewalk.

    :returns: bool
    """
    if self._md_exists('repomd'):
        repomd_old_path = self._retrieve_md_path('repomd')
        repomd_new_path = os.path.join(self._get_repodata_path(), "repomd.xml.new")
        # Newer file not available? Don't do anything. It should be downloaded before this.
        if not os.path.isfile(repomd_new_path):
            return True
        return checksum.getFileChecksum('sha256', filename=repomd_old_path) == \
            checksum.getFileChecksum('sha256', filename=repomd_new_path)
    else:
        return False
def _uploadPackage(self, channels, org_id, force, info):
    """ Write the bits to a temporary file """
    packageBits = info['package']
    package_stream = tempfile.TemporaryFile()
    package_stream.write(packageBits)
    package_stream.seek(0, 0)
    del packageBits

    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(package_stream)
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(header.checksum_type(), file=payload_stream)
    relative_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=org_id, checksum=checksum, checksum_type=checksum_type)
    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, checksum_type, checksum,
        org_id=org_id, force=force,
        header_start=header_start, header_end=header_end,
        relative_path=relative_path)
    if diff_level:
        return package_dict, diff_level
    return 0
def get_package(self, pack):
    url = self.base_url + '/' + pack['path']
    file_path = self._download(url)
    if getFileChecksum(pack['checksum_type'], filename=file_path) != pack['checksum']:
        raise IOError("Package file does not match intended download.")
    return file_path
def _processFile(filename, relativeDir=None, source=None, nosig=None):
    """ call parent _processFile and add md5sum to the returned hash """
    info = uploadLib.UploadClass._processFile(filename, relativeDir, source, nosig)
    checksum = getFileChecksum('md5', filename=filename)
    info['md5sum'] = checksum
    return info
def move_package(filename, basedir, relpath, checksum_type, checksum, force=None):
    """
    Copies the information from the file descriptor to a file
    Checks the file's checksum, raising FileConflictError if it's different
    The force flag prevents the exception from being raised, and copies the
    file even if the checksum has changed
    """
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath):
        if force:
            os.unlink(packagePath)
        else:
            # Get its checksum
            localsum = getFileChecksum(checksum_type, packagePath)
            if checksum == localsum:
                # Same file, so get outa here
                return
            raise FileConflictError(os.path.basename(packagePath))

    dir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(dir):
        createPath(dir)

    # Check if the RPM has been downloaded from a remote repository
    # If so, it is stored in CFG.MOUNT_POINT and we have to move it
    # If not, the repository is local to the server, so the rpm should be copied
    if filename.startswith(CFG.MOUNT_POINT):
        shutil.move(filename, packagePath)
    else:
        shutil.copy(filename, packagePath)

    # set the path perms readable by all users
    os.chmod(packagePath, int('0644', 8))
def copy_package(fd, basedir, relpath, checksum_type, checksum, force=None):
    """
    Copies the information from the file descriptor to a file
    Checks the file's checksum, raising FileConflictError if it's different
    The force flag prevents the exception from being raised, and copies the
    file even if the checksum has changed
    """
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath) and not force:
        # Get its checksum
        localsum = getFileChecksum(checksum_type, packagePath)
        if checksum == localsum:
            # Same file, so get outa here
            return
        raise FileConflictError(os.path.basename(packagePath))

    dir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(dir):
        createPath(dir)

    pkgfd = os.open(packagePath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    os.lseek(fd, 0, 0)
    while 1:
        buffer = os.read(fd, 65536)
        if not buffer:
            break
        n = os.write(pkgfd, buffer)
        if n != len(buffer):
            # Error writing to the file
            raise IOError, "Wrote %s out of %s bytes in file %s" % (
                n, len(buffer), packagePath)
    os.close(pkgfd)
    # set the path perms readable by all users
    setPermsPath(packagePath, chmod=0644)
def match_package_checksum(self, relpath, abspath, checksum_type, checksum):
    if os.path.exists(abspath):
        if relpath not in self.checksum_cache:
            self.checksum_cache[relpath] = {}
        cached_checksums = self.checksum_cache[relpath]
        if checksum_type not in cached_checksums:
            checksum_disk = getFileChecksum(checksum_type, filename=abspath)
            cached_checksums[checksum_type] = checksum_disk
        else:
            checksum_disk = cached_checksums[checksum_type]
        if checksum_disk == checksum:
            return 1
    elif relpath in self.checksum_cache:
        # Remove path from cache if not exists
        del self.checksum_cache[relpath]
    return 0
def upload_package(self, package, path):
    temp_file = open(path, 'rb')
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(temp_file)
    package.checksum_type = header.checksum_type()
    package.checksum = getFileChecksum(package.checksum_type, file=temp_file)
    rel_package_path = rhnPackageUpload.relative_path_from_header(
        header, self.channel['org_id'], package.checksum_type, package.checksum)
    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, package.checksum_type, package.checksum,
        force=False, header_start=header_start, header_end=header_end,
        relative_path=rel_package_path, org_id=self.channel['org_id'])
    temp_file.close()
def handler(self, req):
    ret = basePackageUpload.BasePackageUpload.handler(self, req)
    if ret != apache.OK:
        return ret

    temp_stream = rhnPackageUpload.write_temp_file(req, 16384, self.packaging)
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(temp_stream)
    # Sanity check - removed, the package path can no longer be determined
    # without the header
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(checksum_type, file=payload_stream)
    self.rel_package_path = rhnPackageUpload.relative_path_from_header(
        header, org_id=self.org_id, checksum_type=checksum_type, checksum=checksum)
    self.package_path = os.path.join(CFG.MOUNT_POINT, self.rel_package_path)

    # Verify the checksum of the bytes we downloaded against the checksum
    # presented by rhnpush in the HTTP headers
    if not (checksum_type == self.file_checksum_type
            and checksum == self.file_checksum):
        log_debug(1, "Mismatching checksums: expected",
                  self.file_checksum_type, self.file_checksum,
                  "; got:", checksum_type, checksum)
        raise rhnFault(104, "Mismatching information")

    package_dict, diff_level = rhnPackageUpload.push_package(
        header, payload_stream, checksum_type, checksum,
        force=self.force, header_start=header_start, header_end=header_end,
        relative_path=self.rel_package_path, org_id=self.org_id)

    if diff_level:
        return self._send_package_diff(req, diff_level, package_dict)

    # Everything went fine
    rhnSQL.commit()
    reply = "All OK"
    req.headers_out['Content-Length'] = str(len(reply))
    req.send_http_header()
    req.write(reply)
    log_debug(2, "Returning with OK")
    return apache.OK
def _populateFromFile(self, f_path, relpath=None, org_id=None, channels=[], source=None):
    f_obj = file(f_path)
    import server.rhnPackageUpload as rhnPackageUpload
    header, payload_stream, header_start, header_end = \
        rhnPackageUpload.load_package(f_obj)
    if (source and not header.is_source) or (not source and header.is_source):
        raise ValueError("Unexpected RPM package type")
    # Get the size
    size = os.path.getsize(f_path)
    path = None
    if relpath:
        # Strip trailing slashes
        path = "%s/%s" % (sanitizePath(relpath), os.path.basename(f_path))
    checksum_type = header.checksum_type()
    checksum = getFileChecksum(header.checksum_type(), file=payload_stream)
    self.populate(header, size, checksum_type, checksum, path, org_id,
                  header_start, header_end, channels)
def import_kickstart(self, plug, repo_label):
    ks_path = 'rhn/kickstart/'
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"

    # construct ks_path and check we already have this KS tree synced
    id_request = """
        select id
          from rhnKickstartableTree
         where channel_id = :channel_id and label = :label
    """

    if 'org_id' in self.channel and self.channel['org_id']:
        ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path in database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label,
                                   org_id=self.channel['org_id'])
    else:
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label)

    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                pass

    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return

    # Make sure images are included
    to_download = []
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path):
            to_download.append(repo_path)

    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
        ks_id = row['id']
    else:
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
        ks_id = row['id']

        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                        kstree_type, install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
                       base_path=db_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)

        log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                    last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)

    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
    """)

    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue

        parser = KSDirParser(cur_dir_html)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if repo_path not in to_download:
                to_download.append(repo_path)

    if to_download:
        log(0, "Downloading %d files." % len(to_download))
        for item in to_download:
            for retry in range(3):
                try:
                    log(1, "Retrieving %s" % item)
                    plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                    st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                    break
                except OSError:  # os.stat if the file wasn't downloaded
                    if retry < 2:
                        log(2, "Retry download %s: attempt #%d" % (item, retry + 1))
                    else:
                        raise
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum('sha256',
                                                      os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size, st_time=st.st_mtime)
    else:
        log(0, "Nothing to download.")

    rhnSQL.commit()
def process_sha256_packages():
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print("No SHA256 capable packages to process.")
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print("Processing %s SHA256 capable packages" % len(packages))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40, stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()

        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)
        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]
        new_path = get_package_path(nevra, org_id, prepend=old_path[0], checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer" % (old_abs_path, new_abs_path))

                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)

                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)

                # Make the new path readable
                os.chmod(new_abs_path, int('0644', 8))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id, csum=csum,
                                              pid=package['id'], filename=f)

        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

    pb.printComplete()
def md5sum_for_stream(data_stream):
    """Calculate the md5sum for a datastream and return it in a utf8 friendly format"""
    return checksum.getFileChecksum("md5", file_obj=data_stream)
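A minimal usage sketch for the helper above (the in-memory stream built with io is only an illustration; real callers pass whatever file-like object already holds the data):

    import io

    # md5 of an in-memory byte stream, e.g. file contents read elsewhere
    stream = io.BytesIO(b"example configuration contents\n")
    print(md5sum_for_stream(stream))  # prints the hex md5 digest of the stream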
def match_package_checksum(abspath, checksum_type, checksum):
    if (os.path.exists(abspath) and
            getFileChecksum(checksum_type, filename=abspath) == checksum):
        return 1
    return 0
def process_sha256_packages():
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print "No SHA256 capable packages to process."
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print "Processing %s SHA256 capable packages" % len(packages)

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40, stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()

        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)
        temp_file = open(old_abs_path, 'rb')
        header, _payload_stream, _header_start, _header_end = \
            rhnPackageUpload.load_package(temp_file)
        checksum_type = header.checksum_type()
        checksum = getFileChecksum(checksum_type, file_obj=temp_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]
        new_path = get_package_path(nevra, org_id, prepend=old_path[0], checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer" % (old_abs_path, new_abs_path))

                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)

                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)

                # Make the new path readable
                os.chmod(new_abs_path, 0644)
        except OSError, e:
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print message
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id, csum=csum,
                                              pid=package['id'], filename=f)

        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError, e:
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print message
            if debug:
                log.writeMessage(message)
            sys.exit(1)
def rotateFile(filepath, depth=5, suffix='.', verbosity=0):
    """ backup/rotate a file
        depth (-1==no limit) refers to num. of backups (rotations) to keep.

        Behavior:
          (1)
            x.txt (current)
            x.txt.1 (old)
            x.txt.2 (older)
            x.txt.3 (oldest)
          (2)
            all file stats preserved. Doesn't blow away original file.
          (3)
            if x.txt and x.txt.1 are identical (size or checksum), None is
            returned
    """

    # check argument sanity (should really be done outside of this function)
    if not filepath or not isinstance(filepath, type('')):
        raise ValueError("filepath '%s' is not a valid argument" % filepath)
    if not isinstance(depth, type(0)) or depth < -1 \
            or depth > MaxInt - 1 or depth == 0:
        raise ValueError("depth must fall within range "
                         "[-1, 1...%s]" % (MaxInt - 1))

    # force verbosity to be a numeric value
    verbosity = verbosity or 0
    if not isinstance(verbosity, type(0)) or verbosity < -1 \
            or verbosity > MaxInt - 1:
        raise ValueError('invalid verbosity value: %s' % (verbosity))

    filepath = cleanupAbsPath(filepath)
    if not os.path.isfile(filepath):
        raise ValueError("filepath '%s' does not lead to a file" % filepath)

    pathNSuffix = filepath + suffix
    pathNSuffix1 = pathNSuffix + '1'

    if verbosity > 1:
        sys.stderr.write("Working dir: %s\n" % os.path.dirname(pathNSuffix))

    # is there anything to do? (existence, then size, then checksum)
    checksum_type = 'sha1'
    if os.path.exists(pathNSuffix1) and os.path.isfile(pathNSuffix1) \
            and os.stat(filepath)[6] == os.stat(pathNSuffix1)[6] \
            and getFileChecksum(checksum_type, filepath) == \
            getFileChecksum(checksum_type, pathNSuffix1):
        # nothing to do
        if verbosity:
            sys.stderr.write("File '%s' is identical to its rotation. "
                             "Nothing to do.\n" % os.path.basename(filepath))
        return None

    # find last in series (of rotations):
    last = 0
    while os.path.exists('%s%d' % (pathNSuffix, last + 1)):
        last = last + 1

    # percolate renames:
    for i in range(last, 0, -1):
        os.rename('%s%d' % (pathNSuffix, i), '%s%d' % (pathNSuffix, i + 1))
        if verbosity > 1:
            filename = os.path.basename(pathNSuffix)
            sys.stderr.write("Moving file: %s%d --> %s%d\n" % (filename, i, filename, i + 1))

    # blow away excess rotations:
    if depth != -1:
        last = last + 1
        for i in range(depth + 1, last + 1):
            path = '%s%d' % (pathNSuffix, i)
            os.unlink(path)
            if verbosity:
                sys.stderr.write("Rotated out: '%s'\n" % (os.path.basename(path)))

    # do the actual rotation
    shutil.copy2(filepath, pathNSuffix1)
    if os.path.exists(pathNSuffix1) and verbosity:
        sys.stderr.write("Backup made: '%s' --> '%s'\n"
                         % (os.path.basename(filepath), os.path.basename(pathNSuffix1)))

    # return the full filepath of the backed up file
    return pathNSuffix1
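A minimal usage sketch of rotateFile (the path below is hypothetical; assumes the function and the helpers it relies on, such as cleanupAbsPath and getFileChecksum, are importable from their module):

    # Keep up to three numbered backups of a file before rewriting it.
    backup = rotateFile('/etc/rhn/cluster.ini', depth=3, verbosity=1)
    if backup is None:
        print("File identical to its last rotation; nothing was backed up.")
    else:
        print("Previous version saved as %s" % backup)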
def import_kickstart(self, plug, repo_label):
    ks_path = 'rhn/kickstart/'
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"

    # construct ks_path and check we already have this KS tree synced
    id_request = """
        select id
          from rhnKickstartableTree
         where channel_id = :channel_id and label = :label
    """

    if 'org_id' in self.channel and self.channel['org_id']:
        ks_path += str(self.channel['org_id']) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path in database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label,
                                   org_id=self.channel['org_id'])
    else:
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label)

    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(CFG.MOUNT_POINT, ks_path))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                pass

    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return

    if self.ks_install_type is None:
        family = treeinfo_parser.get_family()
        if family == 'Fedora':
            self.ks_install_type = 'fedora18'
        elif family == 'CentOS':
            self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
        else:
            self.ks_install_type = 'generic_rpm'

    # Make sure images are included
    to_download = []
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path):
            to_download.append(repo_path)

    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
        ks_id = row['id']
    else:
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
        ks_id = row['id']

        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                        kstree_type, install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
                       base_path=db_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)

        log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                    last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)

    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
    """)

    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue

        parser = KSDirParser(cur_dir_html)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if repo_path not in to_download:
                to_download.append(repo_path)

    if to_download:
        log(0, "Downloading %d files." % len(to_download))
        for item in to_download:
            for retry in range(3):
                try:
                    log(1, "Retrieving %s" % item)
                    plug.get_file(item, os.path.join(CFG.MOUNT_POINT, ks_path))
                    st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
                    break
                except OSError:  # os.stat if the file wasn't downloaded
                    if retry < 2:
                        log(2, "Retry download %s: attempt #%d" % (item, retry + 1))
                    else:
                        raise
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum('sha256',
                                                      os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size, st_time=st.st_mtime)
    else:
        log(0, "Nothing to download.")

    rhnSQL.commit()
def import_kickstart(self, plug, url, repo_label):
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    pxeboot_path = 'images/pxeboot/'
    pxeboot = plug.get_file(pxeboot_path)
    if pxeboot is None:
        if not re.search(r'/$', url):
            url = url + '/'
        self.error_msg("ERROR: kickstartable tree not detected (no %s%s)" % (url, pxeboot_path))
        return

    if rhnSQL.fetchone_dict("""
        select id
          from rhnKickstartableTree
         where org_id = :org_id and channel_id = :channel_id and label = :label
        """, org_id=self.channel['org_id'], channel_id=self.channel['id'],
            label=ks_tree_label):
        print "Kickstartable tree %s already synced." % ks_tree_label
        return

    row = rhnSQL.fetchone_dict("""
        select sequence_nextval('rhn_kstree_id_seq') as id from dual
    """)
    ks_id = row['id']
    ks_path = 'rhn/kickstart/%s/%s' % (self.channel['org_id'], ks_tree_label)

    row = rhnSQL.execute("""
        insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                    kstree_type, install_type, last_modified, created, modified)
        values (:id, :org_id, :label, :base_path, :channel_id,
                ( select id from rhnKSTreeType where label = 'externally-managed'),
                ( select id from rhnKSInstallType where label = 'generic_rpm'),
                current_timestamp, current_timestamp, current_timestamp)
        """, id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
            base_path=os.path.join(CFG.MOUNT_POINT, ks_path),
            channel_id=self.channel['id'])

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                    last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)

    dirs = ['']
    while len(dirs) > 0:
        d = dirs.pop(0)
        v = None
        if d == pxeboot_path:
            v = pxeboot
        else:
            v = plug.get_file(d)
        if v is None:
            continue

        for s in (m.group(1) for m in re.finditer(r'(?i)<a href="(.+?)"', v)):
            if (re.match(r'/', s) or re.search(r'\?', s) or re.search(r'\.\.', s)
                    or re.match(r'[a-zA-Z]+:', s) or re.search(r'\.rpm$', s)):
                continue
            if re.search(r'/$', s):
                dirs.append(d + s)
                continue
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, d, s)
            if os.path.exists(local_path):
                print "File %s%s already present locally" % (d, s)
            else:
                print "Retrieving %s" % d + s
                plug.get_file(d + s, os.path.join(CFG.MOUNT_POINT, ks_path))
            st = os.stat(local_path)
            insert_h.execute(id=ks_id, path=d + s,
                             checksum=getFileChecksum('sha256', local_path),
                             st_size=st.st_size, st_time=st.st_mtime)

    rhnSQL.commit()
def import_kickstart(self, plug, repo_label):
    ks_path = 'rhn/kickstart/'
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"

    # construct ks_path and check we already have this KS tree synced
    id_request = """
        select id
          from rhnKickstartableTree
         where channel_id = :channel_id and label = :label
    """

    if self.org_id:
        ks_path += str(self.org_id) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path in database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label,
                                   org_id=self.org_id)
    else:
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label)

    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(plug.repo.basecachedir, plug.name))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                pass

    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return

    if self.ks_install_type is None:
        family = treeinfo_parser.get_family()
        if family == 'Fedora':
            self.ks_install_type = 'fedora18'
        elif family == 'CentOS':
            self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
        else:
            self.ks_install_type = 'generic_rpm'

    fileutils.createPath(os.path.join(CFG.MOUNT_POINT, ks_path))

    # Make sure images are included
    to_download = set()
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path) or self.force_kickstart:
            to_download.add(repo_path)

    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
        ks_id = row['id']
    else:
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
        ks_id = row['id']

        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                        kstree_type, install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.org_id, label=ks_tree_label,
                       base_path=db_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)

        log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                    last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)

    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
    """)

    # Downloading/Updating content of KS Tree
    # start from root dir
    is_root = True
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue

        blacklist = None
        if is_root:
            blacklist = [treeinfo_parser.get_package_dir() + '/']
            is_root = False

        parser = KSDirParser(cur_dir_html, blacklist)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if not os.path.exists(os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) \
                    or self.force_kickstart:
                to_download.add(repo_path)

    if to_download:
        log(0, "Downloading %d kickstart files." % len(to_download))
        progress_bar = ProgressBarLogger("Downloading kickstarts:", len(to_download))
        downloader = ThreadedDownloader(force=self.force_kickstart)
        for item in to_download:
            params = {}
            plug.set_download_parameters(params, item,
                                         os.path.join(CFG.MOUNT_POINT, ks_path, item))
            downloader.add(params)
        downloader.set_log_obj(progress_bar)
        downloader.run()
        log2disk(0, "Download finished.")
        for item in to_download:
            st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item,
                             checksum=getFileChecksum('sha256',
                                                      os.path.join(CFG.MOUNT_POINT, ks_path, item)),
                             st_size=st.st_size, st_time=st.st_mtime)
    else:
        log(0, "No new kickstart files to download.")

    # set permissions recursively
    rhnSQL.commit()
try:
    f = open(pkg)
    header, payload_stream = rhn_mpm.load(file=f)
    checksum_type = header.checksum_type()
except rhn_mpm.InvalidPackageError, e:
    if not self.options.tolerant:
        self.die(-1, "ERROR: %s: This file doesn't appear to be a package" % pkg)
    self.warn(2, "ERROR: %s: This file doesn't appear to be a package" % pkg)
    continue
except IOError:
    if not self.options.tolerant:
        self.die(-1, "ERROR: %s: No such file or directory available" % pkg)
    self.warn(2, "ERROR: %s: No such file or directory available" % pkg)
    continue

checksum = getFileChecksum(checksum_type, file=payload_stream)
digest_hash[pkg_key] = (checksum_type, checksum)
f.close()

for tag in ("name", "version", "release", "epoch", "arch"):
    val = header[tag]
    if val is None:
        val = ""
    pkg_info[tag] = val
# b195903:the arch for srpms should be obtained by is_source check
# instead of checking arch in header
if header.is_source:
    if not self.options.source:
        self.die(-1, "ERROR: Trying to Push src rpm, Please re-try with --source.")
    if RPMTAG_NOSOURCE in header.keys():
        pkg_info["arch"] = "nosrc"
def packages(self):
    self.setForce()
    # set the org
    self.setOrg()
    # set the URL
    self.setURL()
    # set the channels
    self.setChannels()
    # set the server
    self.setServer()
    # 12/22/05 wregglej 173287 authenticate the session.
    self.authenticate()

    # Do we have the new-style handler available?

    # ping the server for status
    self.warn(2, "url is", self.url_v2)
    ping = rhnpush_v2.PingPackageUpload(self.url_v2, self.options.proxy)
    self.ping_status, errmsg, headerinfo = ping.ping()
    self.warn(2, "Result codes:", self.ping_status, errmsg)

    # move patch clusters to the end because all the patches in the cluster
    # have to be pushed before the cluster itself
    files1 = []
    files2 = []
    for file in self.files:
        if file.startswith("patch-cluster-"):
            files2.append(file)
        else:
            files1.append(file)
    self.files = files1 + files2

    channel_packages = []

    # a little fault tolerance is in order
    random.seed()
    checkpkgflag = 0
    tries = 3

    # pkilambi:check if the Sat version we are talking to has this capability.
    # If not use the normal way to talk to older satellites(< 4.1.0).
    if headerinfo.getheader("X-RHN-Check-Package-Exists"):
        checkpkgflag = 1
        (server_digest_hash, pkgs_info, digest_hash) = self.check_package_exists()

    for pkg in self.files:
        ret = None  # pkilambi:errors off as not initialized.this fixes it.

        # temporary fix for picking pkgs instead of full paths
        pkg_key = (pkg.strip()).split("/")[-1]

        if checkpkgflag:
            # it's newer satellite, compute checksum checks on client.
            if not server_digest_hash.has_key(pkg_key):
                continue

            checksum_type, checksum = digest = digest_hash[pkg_key]
            server_digest = tuple(server_digest_hash[pkg_key])

            # compare checksums for existence check
            if server_digest == digest and not self.options.force:
                channel_packages.append(pkgs_info[pkg_key])
                self.warn(1, "Package %s already exists on the RHN Server-- Skipping Upload...." % pkg)
                continue
            elif server_digest == ():
                self.warn(1, "Package %s Not Found on RHN Server -- Uploading" % pkg)
            elif server_digest == "on-disk" and not self.options.force:
                channel_packages.append(pkgs_info[pkg_key])
                self.warn(0, "Package %s on disk but not on db -- Skipping Upload " % pkg)
                continue
            elif server_digest != digest:
                if self.options.force:
                    self.warn(1, "Package checksum %s mismatch -- Forcing Upload" % pkg)
                else:
                    msg = ("""Error: Package %s already exists on the server with a different checksum.
Skipping upload to prevent overwriting existing package. (You may use rhnpush with
the --force option to force this upload if the force_upload option is enabled
on your server.)\n""" % pkg)
                    if not self.options.tolerant:
                        self.die(-1, msg)
                    self.warn(0, msg)
                    continue
        else:
            # it's an older satellite(< 4.1.0). Just do the push the usual old way,
            # without checksum pre-check.
            try:
                f = open(pkg)
                header, payload_stream = rhn_mpm.load(file=f)
                checksum_type = header.checksum_type()
            except rhn_mpm.InvalidPackageError, e:
                if not self.options.tolerant:
                    self.die(-1, "ERROR: %s: This file doesn't appear to be a package" % pkg)
                self.warn(2, "ERROR: %s: This file doesn't appear to be a package" % pkg)
                continue
            except IOError:
                if not self.options.tolerant:
                    self.die(-1, "ERROR: %s: No such file or directory available" % pkg)
                self.warn(2, "ERROR: %s: No such file or directory available" % pkg)
                continue
            checksum = getFileChecksum(checksum_type, file=payload_stream)
            f.close()
def import_kickstart(self, plug, url, repo_label):
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    pxeboot_path = 'images/pxeboot/'
    pxeboot = plug.get_file(pxeboot_path)
    if pxeboot is None:
        if not re.search(r'/$', url):
            url = url + '/'
        self.print_msg("Kickstartable tree not detected (no %s%s)" % (url, pxeboot_path))
        return

    if rhnSQL.fetchone_dict("""
        select id
          from rhnKickstartableTree
         where org_id = :org_id and channel_id = :channel_id and label = :label
        """, org_id=self.channel['org_id'], channel_id=self.channel['id'],
            label=ks_tree_label):
        print("Kickstartable tree %s already synced." % ks_tree_label)
        return

    row = rhnSQL.fetchone_dict("""
        select sequence_nextval('rhn_kstree_id_seq') as id from dual
    """)
    ks_id = row['id']
    ks_path = 'rhn/kickstart/%s/%s' % (self.channel['org_id'], ks_tree_label)

    row = rhnSQL.execute("""
        insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                    kstree_type, install_type, last_modified, created, modified)
        values (:id, :org_id, :label, :base_path, :channel_id,
                ( select id from rhnKSTreeType where label = 'externally-managed'),
                ( select id from rhnKSInstallType where label = 'generic_rpm'),
                current_timestamp, current_timestamp, current_timestamp)
        """, id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
            base_path=os.path.join(CFG.MOUNT_POINT, ks_path),
            channel_id=self.channel['id'])

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                    last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)

    dirs = ['']
    while len(dirs) > 0:
        d = dirs.pop(0)
        v = None
        if d == pxeboot_path:
            v = pxeboot
        else:
            v = plug.get_file(d)
        if v is None:
            continue

        for s in (m.group(1) for m in re.finditer(r'(?i)<a href="(.+?)"', v)):
            if (re.match(r'/', s) or re.search(r'\?', s) or re.search(r'\.\.', s)
                    or re.match(r'[a-zA-Z]+:', s) or re.search(r'\.rpm$', s)):
                continue
            if re.search(r'/$', s):
                dirs.append(d + s)
                continue
            local_path = os.path.join(CFG.MOUNT_POINT, ks_path, d, s)
            if os.path.exists(local_path):
                print("File %s%s already present locally" % (d, s))
            else:
                print("Retrieving %s" % d + s)
                plug.get_file(d + s, os.path.join(CFG.MOUNT_POINT, ks_path))
            st = os.stat(local_path)
            insert_h.execute(id=ks_id, path=d + s,
                             checksum=getFileChecksum('sha256', local_path),
                             st_size=st.st_size, st_time=st.st_mtime)

    rhnSQL.commit()
def import_kickstart(self, plug, url, repo_label):
    pxeboot_path = 'images/pxeboot/'
    pxeboot = plug.get_file(pxeboot_path)
    if pxeboot is None:
        if not re.search(r'/$', url):
            url += '/'
        self.print_msg("Kickstartable tree not detected (no %s%s)" % (url, pxeboot_path))
        return

    ks_path = 'rhn/kickstart/'
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"

    id_request = """
        select id
          from rhnKickstartableTree
         where channel_id = :channel_id and label = :label
    """

    if 'org_id' in self.channel and self.channel['org_id']:
        ks_path += str(self.channel['org_id']) + '/' + CFG.MOUNT_POINT + ks_tree_label
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label,
                                   org_id=self.channel['org_id'])
    else:
        ks_path += ks_tree_label
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL",
                                   channel_id=self.channel['id'],
                                   label=ks_tree_label)

    if row:
        print("Kickstartable tree %s already synced with id = %d. Updating content..."
              % (ks_tree_label, row['id']))
        ks_id = row['id']
    else:
        row = rhnSQL.fetchone_dict("""
            select sequence_nextval('rhn_kstree_id_seq') as id from dual
        """)
        ks_id = row['id']

        rhnSQL.execute("""
            insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id,
                        kstree_type, install_type, last_modified, created, modified)
            values (:id, :org_id, :label, :base_path, :channel_id,
                    ( select id from rhnKSTreeType where label = :ks_tree_type),
                    ( select id from rhnKSInstallType where label = :ks_install_type),
                    current_timestamp, current_timestamp, current_timestamp)""",
                       id=ks_id, org_id=self.channel['org_id'], label=ks_tree_label,
                       base_path=ks_path, channel_id=self.channel['id'],
                       ks_tree_type=self.ks_tree_type,
                       ks_install_type=self.ks_install_type)

        print("Added new kickstartable tree %s with id = %d. Downloading content..."
              % (ks_tree_label, row['id']))

    insert_h = rhnSQL.prepare("""
        insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size,
                    last_modified, created, modified)
        values (:id, :path, lookup_checksum('sha256', :checksum), :st_size,
                epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp)
    """)

    delete_h = rhnSQL.prepare("""
        delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path
    """)

    # Downloading/Updating content of KS Tree
    # start from root dir
    dirs_queue = ['']
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = None
        if cur_dir_name == pxeboot_path:
            cur_dir_html = pxeboot
        else:
            cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue

        parser = KSDirParser()
        parser.feed(cur_dir_html.split('<HR>')[1])

        for ks_file in parser.get_content():
            # do not download rpms, they are already downloaded by self.import_packages()
            if re.search(r'\.rpm$', ks_file['name']) or re.search(r'\.\.', ks_file['name']):
                continue
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(cur_dir_name + ks_file['name'])
                continue
            else:
                local_path = os.path.join(CFG.MOUNT_POINT, ks_path, cur_dir_name, ks_file['name'])
                need_download = True
                if os.path.exists(local_path):
                    t = os.path.getmtime(local_path)
                    if ks_file['datetime'] == datetime.utcfromtimestamp(t).strftime('%d-%b-%Y %H:%M'):
                        print("File %s%s already present locally" % (cur_dir_name, ks_file['name']))
                        need_download = False
                        st = os.stat(local_path)
                    else:
                        os.unlink(os.path.join(CFG.MOUNT_POINT, ks_path,
                                               cur_dir_name + ks_file['name']))
                if need_download:
                    for retry in range(3):
                        try:
                            print("Retrieving %s" % cur_dir_name + ks_file['name'])
                            plug.get_file(cur_dir_name + ks_file['name'],
                                          os.path.join(CFG.MOUNT_POINT, ks_path))
                            st = os.stat(local_path)
                            break
                        except OSError:  # os.stat if the file wasn't downloaded
                            if retry < 2:
                                print("Retry download %s: attempt #%d"
                                      % (cur_dir_name + ks_file['name'], retry + 1))
                            else:
                                raise
                # update entity about current file in a database
                delete_h.execute(id=ks_id, path=(cur_dir_name + ks_file['name']))
                insert_h.execute(id=ks_id, path=(cur_dir_name + ks_file['name']),
                                 checksum=getFileChecksum('sha256', local_path),
                                 st_size=st.st_size, st_time=st.st_mtime)

    rhnSQL.commit()
def _processFile(filename, relativeDir=None, source=None, nosig=None):
    """ Processes a file
        Returns a hash containing:
          header
          packageSize
          checksum
          relativePath
          nvrea
    """

    # Is this a file?
    if not os.access(filename, os.R_OK):
        raise UploadError("Could not stat the file %s" % filename)
    if not os.path.isfile(filename):
        raise UploadError("%s is not a file" % filename)

    # Size
    size = os.path.getsize(filename)

    # Open the file
    f = open(filename, "r")
    # Read the header
    h = get_header(None, f.fileno(), source)
    (header_start, header_end) = get_header_byte_range(f)
    # Rewind the file
    f.seek(0, 0)
    # Compute digest
    checksum_type = h.checksum_type()
    checksum = getFileChecksum(checksum_type, file=f)
    f.close()

    if h is None:
        raise UploadError("%s is not a valid RPM file" % filename)

    if nosig is None and not h.is_signed():
        raise UploadError("ERROR: %s: unsigned rpm (use --nosig to force)" % filename)

    # Get the name, version, release, epoch, arch
    lh = []
    for k in ["name", "version", "release", "epoch"]:
        lh.append(h[k])
    # Fix the epoch
    if lh[3] is None:
        lh[3] = ""
    else:
        lh[3] = str(lh[3])

    if source:
        lh.append("src")
    else:
        lh.append(h["arch"])

    # Build the header hash to be sent
    hash = {
        "header": Binary(h.unload()),
        "checksum_type": checksum_type,
        "checksum": checksum,
        "packageSize": size,
        "header_start": header_start,
        "header_end": header_end,
    }
    if relativeDir:
        # Append the relative dir too
        hash["relativePath"] = "%s/%s" % (relativeDir, os.path.basename(filename))
    hash["nvrea"] = tuple(lh)
    return hash