def repomd_up_to_date(self):
    """Return True when the cached repomd.xml is current.

    "Current" means either that no newer repomd.xml.new has been downloaded
    yet, or that the downloaded one is byte-identical (by SHA-256) to the
    cached copy.  Returns False when there is no cached repomd.xml at all.
    """
    cache_dir = os.path.join(self.repo.basecachedir, self.name)
    cached = os.path.join(cache_dir, "repomd.xml")
    # Without a cached copy we are, by definition, not up to date.
    if not os.path.isfile(cached):
        return False
    fresh = os.path.join(cache_dir, "repomd.xml.new")
    # The newer file should have been downloaded before this is called;
    # if it is missing, do nothing and report the cache as current.
    if not os.path.isfile(fresh):
        return True
    old_digest = checksum.getFileChecksum('sha256', filename=cached)
    new_digest = checksum.getFileChecksum('sha256', filename=fresh)
    return old_digest == new_digest
def copyFiles(options):
    """ copies SSL cert and GPG key to --pub-tree if not in there already
        existence check should have already been done.
    """

    pubDir = cleanupAbsPath(options.pub_tree or DEFAULT_APACHE_PUB_DIRECTORY)

    def copyFile(file0, file1):
        # Hard-exit on a missing destination directory or source file;
        # callers are expected to have validated existence already.
        if not os.path.exists(os.path.dirname(file1)):
            sys.stderr.write("ERROR: directory does not exist:\n %s\n"
                             % os.path.dirname(file1))
            sys.exit(errnoBadPath)
        if not os.path.exists(file0):
            sys.stderr.write("ERROR: file does not exist:\n %s\n" % file0)
            sys.exit(errnoCANotFound)
        # fixed typo: "Coping" -> "Copying"
        sys.stderr.write("""\
Copying file into public directory tree:
    %s to
    %s
""" % (file0, file1))
        shutil.copy(file0, file1)

    def _publish(src):
        # Install src into pubDir: rotate out an existing copy whose checksum
        # differs, skip the copy entirely when an identical one is in place.
        writeYN = 1
        dest = os.path.join(pubDir, os.path.basename(src))
        if os.path.dirname(src) != pubDir:
            if os.path.isfile(dest) \
               and getFileChecksum('md5', src) != getFileChecksum('md5', dest):
                rotateFile(dest, options.verbose)
            elif os.path.isfile(dest):
                writeYN = 0
        if writeYN:
            copyFile(src, dest)

    # CA SSL cert
    if options.ssl_cert:
        _publish(options.ssl_cert)

    # corp GPG keys
    if not options.no_gpg and options.gpg_key:
        for gpg_key in options.gpg_key.split(","):
            _publish(gpg_key)
def repomd_up_to_date(self): """ Check if repomd.xml has been updated by spacewalk. :returns: bool """ if self._md_exists('repomd'): repomd_old_path = self._retrieve_md_path('repomd') repomd_new_path = os.path.join(self._get_repodata_path(), "repomd.xml.new") # Newer file not available? Don't do anything. It should be downloaded before this. if not os.path.isfile(repomd_new_path): return True return checksum.getFileChecksum('sha256', filename=repomd_old_path) == checksum.getFileChecksum('sha256', filename=repomd_new_path) else: return False
def move_package(filename, basedir, relpath, checksum_type, checksum, force=None):
    """ Copies the information from the file descriptor to a file
        Checks the file's checksum, raising FileConflictError if it's
        different.  The force flag prevents the exception from being raised,
        and copies the file even if the checksum has changed.
    """
    # Deliberate string concat (not os.path.join): an absolute relpath must
    # not be allowed to escape basedir.
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath):
        if force:
            os.unlink(packagePath)
        else:
            # Get its checksum
            localsum = getFileChecksum(checksum_type, packagePath)
            if checksum == localsum:
                # Same file, so get outa here
                return
            raise FileConflictError(os.path.basename(packagePath))
    # renamed from 'dir' to avoid shadowing the builtin
    dest_dir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(dest_dir):
        createPath(dest_dir)
    # Check if the RPM has been downloaded from a remote repository
    # If so, it is stored in CFG.MOUNT_POINT and we have to move it
    # If not, the repository is local to the server, so the rpm should be copied
    if filename.startswith(CFG.MOUNT_POINT):
        shutil.move(filename, packagePath)
    else:
        shutil.copy(filename, packagePath)
    # set the path perms readable by all users
    os.chmod(packagePath, 0o644)
def __is_file_done(local_path=None, file_obj=None, checksum_type=None, checksum=None):
    """Return True when the target file/stream is already complete.

    With a checksum supplied, "done" means the digest matches; without one,
    the mere existence of the local file (or presence of any file object)
    is enough.
    """
    if checksum_type and checksum:
        if local_path and os.path.isfile(local_path):
            return checksum == getFileChecksum(checksum_type, filename=local_path)
        if file_obj:
            return checksum == getFileChecksum(checksum_type, file_obj=file_obj)
    # No checksum to verify (or nothing to verify it against):
    # existence alone decides.
    if local_path and os.path.isfile(local_path):
        return True
    if file_obj:
        return True
    return False
def rotateFile(filepath, depth=5, suffix='.', verbosity=0):
    """ backup/rotate a file
        depth (-1==no limit) refers to num. of backups (rotations) to keep.

        Behavior:
          (1) x.txt (current)
              x.txt.1 (old)
              x.txt.2 (older)
              x.txt.3 (oldest)
          (2) all file stats preserved. Doesn't blow away original file.
          (3) if x.txt and x.txt.1 are identical (size or checksum), None is
              returned
    """

    # check argument sanity (should really be done outside of this function)
    if not filepath or not isinstance(filepath, str):
        # fixed typo: "arguement" -> "argument"
        raise ValueError("filepath '%s' is not a valid argument" % filepath)
    if not isinstance(depth, int) or depth < -1 \
            or depth > MaxInt - 1 or depth == 0:
        raise ValueError("depth must fall within range "
                         "[-1, 1...%s]" % (MaxInt - 1))

    # force verbosity to be a numeric value
    verbosity = verbosity or 0
    if not isinstance(verbosity, int) or verbosity < -1 \
            or verbosity > MaxInt - 1:
        raise ValueError('invalid verbosity value: %s' % (verbosity))

    filepath = cleanupAbsPath(filepath)
    if not os.path.isfile(filepath):
        raise ValueError("filepath '%s' does not lead to a file" % filepath)

    pathNSuffix = filepath + suffix
    pathNSuffix1 = pathNSuffix + '1'

    if verbosity > 1:
        sys.stderr.write("Working dir: %s\n"
                         % os.path.dirname(pathNSuffix))

    # is there anything to do? (existence, then size, then checksum)
    checksum_type = 'sha1'
    if os.path.exists(pathNSuffix1) and os.path.isfile(pathNSuffix1) \
            and os.stat(filepath)[6] == os.stat(pathNSuffix1)[6] \
            and getFileChecksum(checksum_type, filepath) == \
            getFileChecksum(checksum_type, pathNSuffix1):
        # nothing to do
        if verbosity:
            sys.stderr.write("File '%s' is identical to its rotation. "
                             "Nothing to do.\n" % os.path.basename(filepath))
        return None

    # find last in series (of rotations):
    last = 0
    while os.path.exists('%s%d' % (pathNSuffix, last + 1)):
        last = last + 1

    # percolate renames:
    for i in range(last, 0, -1):
        os.rename('%s%d' % (pathNSuffix, i), '%s%d' % (pathNSuffix, i + 1))
        if verbosity > 1:
            filename = os.path.basename(pathNSuffix)
            sys.stderr.write("Moving file: %s%d --> %s%d\n"
                             % (filename, i, filename, i + 1))

    # blow away excess rotations:
    if depth != -1:
        last = last + 1
        for i in range(depth + 1, last + 1):
            path = '%s%d' % (pathNSuffix, i)
            os.unlink(path)
            if verbosity:
                sys.stderr.write("Rotated out: '%s'\n" % (
                    os.path.basename(path)))

    # do the actual rotation
    shutil.copy2(filepath, pathNSuffix1)
    if os.path.exists(pathNSuffix1) and verbosity:
        sys.stderr.write("Backup made: '%s' --> '%s'\n"
                         % (os.path.basename(filepath),
                            os.path.basename(pathNSuffix1)))

    # return the full filepath of the backed up file
    return pathNSuffix1
def process_sha256_packages():
    """Re-checksum SHA256-capable packages, relocating their files on the
    filer to the checksum-based path and updating the database records
    (package path/checksum and per-file checksums).

    Reads module globals ``debug`` and ``verbose``; exits the process on
    filesystem errors.
    """
    if debug:
        log = rhnLog('/var/log/rhn/update-packages.log', 5)

    _get_sha256_packages_sql = rhnSQL.prepare(_get_sha256_packages_query)
    _get_sha256_packages_sql.execute()
    packages = _get_sha256_packages_sql.fetchall_dict()

    if not packages:
        print("No SHA256 capable packages to process.")
        if debug:
            log.writeMessage("No SHA256 capable packages to process.")
        return

    if verbose:
        print(("Processing %s SHA256 capable packages" % len(packages)))

    pb = ProgressBar(prompt='standby: ', endTag=' - Complete!',
                     finalSize=len(packages), finalBarLength=40,
                     stream=sys.stdout)
    pb.printAll(1)

    _update_sha256_package_sql = rhnSQL.prepare(_update_sha256_package)
    _update_package_files_sql = rhnSQL.prepare(_update_package_files)

    for package in packages:
        pb.addTo(1)
        pb.printIncrement()
        old_abs_path = os.path.join(CFG.MOUNT_POINT, package['path'])
        if debug and verbose:
            log.writeMessage("Processing package: %s" % old_abs_path)

        # Use a context manager so the package file is closed per iteration;
        # the original code leaked one file handle per package.
        with open(old_abs_path, 'rb') as pkg_file:
            header, _payload_stream, _header_start, _header_end = \
                rhnPackageUpload.load_package(pkg_file)
            checksum_type = header.checksum_type()
            checksum = getFileChecksum(checksum_type, file_obj=pkg_file)

        old_path = package['path'].split('/')
        nevra = parseRPMFilename(old_path[-1])
        org_id = old_path[1]
        new_path = get_package_path(nevra, org_id, prepend=old_path[0],
                                    checksum=checksum)
        new_abs_path = os.path.join(CFG.MOUNT_POINT, new_path)

        # Filer content relocation
        try:
            if old_abs_path != new_abs_path:
                if debug:
                    log.writeMessage("Relocating %s to %s on filer"
                                     % (old_abs_path, new_abs_path))

                new_abs_dir = os.path.dirname(new_abs_path)
                if not os.path.isdir(new_abs_dir):
                    os.makedirs(new_abs_dir)

                # link() the old path to the new path
                if not os.path.exists(new_abs_path):
                    os.link(old_abs_path, new_abs_path)
                elif debug:
                    log.writeMessage("File %s already exists" % new_abs_path)

                # Make the new path readable
                os.chmod(new_abs_path, int('0644', 8))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when relocating %s to %s on filer: %s" % \
                (old_abs_path, new_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

        # Update package checksum in the database
        _update_sha256_package_sql.execute(ctype=checksum_type, csum=checksum,
                                           path=new_path, id=package['id'])

        _select_checksum_type_id_sql = rhnSQL.prepare(_select_checksum_type_id)
        _select_checksum_type_id_sql.execute(ctype=checksum_type)
        checksum_type_id = _select_checksum_type_id_sql.fetchone()[0]

        # Update checksum of every single file in a package
        for i, f in enumerate(header['filenames']):
            csum = header['filemd5s'][i]
            # Do not update checksums for directories & links
            if not csum:
                continue
            _update_package_files_sql.execute(ctype_id=checksum_type_id,
                                              csum=csum, pid=package['id'],
                                              filename=f)

        rhnSQL.commit()

        try:
            if os.path.exists(old_abs_path):
                os.unlink(old_abs_path)
            if os.path.exists(os.path.dirname(old_abs_path)):
                os.removedirs(os.path.dirname(old_abs_path))
        except OSError:
            e = sys.exc_info()[1]
            message = "Error when removing %s: %s" % (old_abs_path, str(e))
            print(message)
            if debug:
                log.writeMessage(message)
            sys.exit(1)

    pb.printComplete()