def copy_package(fd, basedir, relpath, checksum_type, checksum, force=None):
    """Copy the contents of an open file descriptor into basedir/relpath.

    If the destination file already exists its checksum is compared with
    the expected one: an identical file is left alone, a different one
    raises FileConflictError.  The force flag suppresses the conflict
    check and overwrites the file regardless.

    :param fd: readable OS-level file descriptor (rewound before copying)
    :param basedir: base directory of the package tree
    :param relpath: destination path relative to basedir
    :param checksum_type: checksum algorithm name (e.g. 'md5', 'sha256')
    :param checksum: expected checksum of the package file
    :param force: when true, overwrite even if the checksum differs
    :raises FileConflictError: destination exists with a different checksum
    :raises IOError: a write to the destination was short
    """
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath) and not force:
        # Get its checksum
        localsum = getFileChecksum(checksum_type, packagePath)
        if checksum == localsum:
            # Same file, so get outa here
            return
        raise FileConflictError(os.path.basename(packagePath))
    # renamed from `dir` so the builtin is not shadowed
    destdir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(destdir):
        createPath(destdir)
    pkgfd = os.open(packagePath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    try:
        # Rewind the source descriptor and copy it over in 64k chunks
        os.lseek(fd, 0, 0)
        while 1:
            # renamed from `buffer` so the builtin is not shadowed
            buf = os.read(fd, 65536)
            if not buf:
                break
            n = os.write(pkgfd, buf)
            if n != len(buf):
                # Error writing to the file
                raise IOError("Wrote %s out of %s bytes in file %s" % (
                    n, len(buf), packagePath))
    finally:
        # Close the destination descriptor even when a write fails,
        # so the fd is never leaked
        os.close(pkgfd)
    # set the path perms readable by all users
    # (octal spelled as int('0644', 8) for consistency with move_package)
    setPermsPath(packagePath, chmod=int('0644', 8))
def _getFile(self, create=0): path = os.path.join(self._getDir(create), self.id, self.relative_path) dirname = os.path.dirname(path) if create and not os.path.isdir(dirname): createPath(dirname) return path
def move_package(filename, basedir, relpath, checksum_type, checksum, force=None):
    """Move or copy the package file `filename` into basedir/relpath.

    If the destination already exists its checksum is compared with the
    expected one: an identical file is left alone, a different one raises
    FileConflictError unless `force` is set, in which case the existing
    file is removed first.

    Files under CFG.MOUNT_POINT were downloaded from a remote repository
    and are moved; anything else belongs to a local repository and is
    copied instead.

    :param filename: path of the source package file
    :param basedir: base directory of the package tree
    :param relpath: destination path relative to basedir
    :param checksum_type: checksum algorithm name (e.g. 'md5', 'sha256')
    :param checksum: expected checksum of the package file
    :param force: overwrite an existing file even if its checksum differs
    :raises FileConflictError: destination exists with a different checksum
    """
    packagePath = basedir + "/" + relpath
    # Is the file there already?
    if os.path.isfile(packagePath):
        if force:
            os.unlink(packagePath)
        else:
            # Get its checksum
            localsum = getFileChecksum(checksum_type, packagePath)
            if checksum == localsum:
                # Same file, so get outa here
                return
            raise FileConflictError(os.path.basename(packagePath))
    # renamed from `dir` so the builtin is not shadowed
    destdir = os.path.dirname(packagePath)
    # Create the directory where the file will reside
    if not os.path.exists(destdir):
        createPath(destdir)
    # Check if the RPM has been downloaded from a remote repository
    # If so, it is stored in CFG.MOUNT_POINT and we have to move it
    # If not, the repository is local to the server, so the rpm should be copied
    if filename.startswith(CFG.MOUNT_POINT):
        shutil.move(filename, packagePath)
    else:
        shutil.copy(filename, packagePath)
    # set the path perms readable by all users
    os.chmod(packagePath, int('0644', 8))
def _getDir(self, create=0): dirname = "%s/%s" % (self.mountPoint, self.subdir) if not create: return dirname if not os.path.exists(dirname): createPath(dirname) if not os.path.isdir(dirname): raise MissingXmlDiskSourceDirError("%s is not a directory" % dirname) return dirname
def write_file(self, stream_in):
    """Write the contents of stream_in to self.full_path on the filesystem.

    Aborts the whole process (sys.exit) when the target filesystem does
    not have enough free space; raises FileCreationError when the copy
    itself fails with an IOError.

    NOTE(review): as visible in this excerpt the success path neither
    closes `fout` nor returns `size` despite the original docstring's
    claim — presumably the function continues past this excerpt; confirm
    against the full file.
    """
    dirname = os.path.dirname(self.full_path)
    createPath(dirname)
    # Free-space check via statvfs on the destination directory
    stat = os.statvfs(dirname)
    f_bsize = stat[0]  # file system block size
    # misa: it's kind of icky whether to use f_bfree (free blocks) or
    # f_bavail (free blocks for non-root). f_bavail is more correct, since
    # you don't want to have the system out of disk space because of
    # satsync; but people would get confused when looking at the output of
    # df
    f_bavail = stat[4]  # free blocks
    freespace = f_bsize * float(f_bavail)  # bytes available to non-root
    if self.file_size is not None and self.file_size > freespace:
        msg = messages.not_enough_diskspace % (freespace / 1024)
        log(-1, msg, stream=sys.stderr)
        # pkilambi: As the metadata download does'nt check for unfetched rpms
        # abort the sync when it runs out of disc space
        sys.exit(-1)
        #raise FileCreationError(msg)
    # Keep a minimal safety margin even when file_size is unknown
    if freespace < 5000 * 1024:  # arbitrary
        msg = messages.not_enough_diskspace % (freespace / 1024)
        log(-1, msg, stream=sys.stderr)
        # pkilambi: As the metadata download does'nt check for unfetched rpms
        # abort the sync when it runs out of disc space
        sys.exit(-1)
        #raise FileCreationError(msg)
    fout = open(self.full_path, 'wb')
    # setting file permissions; NOTE: rhnpush uses apache to write to disk,
    # hence the 6 setting.
    setPermsPath(self.full_path, user='******', group='apache', chmod=0644)
    size = 0
    try:
        # Copy the stream in buffer_size chunks, tracking total bytes
        while 1:
            buf = stream_in.read(self.buffer_size)
            if not buf:
                break
            buf_len = len(buf)
            fout.write(buf)
            size = size + buf_len
    except IOError, e:
        msg = "IOError: %s" % e
        log(-1, msg, stream=sys.stderr)
        # Try not to leave garbage around
        try:
            os.unlink(self.full_path)
        except (OSError, IOError):
            pass
        # Re-raise as FileCreationError, preserving the original traceback
        raise FileCreationError(msg), None, sys.exc_info()[2]
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return "%s/suse_subscriptions.xml" % dirname
def sync(self, update_repodata=True):
    """Trigger a reposync of every configured repository URL.

    :param update_repodata: when true, each plugin's metadata cache is
        cleared so repodata is re-fetched
    :return: tuple (elapsed_time, sync_error); sync_error is 0 on full
        success, -1 on a global failure (no URLs, or an exception while
        syncing a repo), otherwise the number of failed packages
    """
    failed_packages = 0
    sync_error = 0
    # No configured URLs is treated as a global failure
    if not self.urls:
        sync_error = -1
    start_time = datetime.now()
    for (repo_id, url, repo_label) in self.urls:
        log(0, "Repo URL: %s" % url)
        plugin = None
        # If the repository uses a uln:// URL, switch to the ULN plugin, overriding the command-line
        if url.startswith("uln://"):
            self.repo_plugin = self.load_plugin("uln")
        # pylint: disable=W0703
        try:
            if repo_label:
                repo_name = repo_label
            else:
                # use modified relative_url as name of repo plugin, because
                # it used as name of cache directory as well
                relative_url = '_'.join(url.split('://')[1].split('/')[1:])
                repo_name = relative_url.replace("?", "_").replace("&", "_").replace("=", "_")
            plugin = self.repo_plugin(url, repo_name, org=str(self.org_id or ''), channel_label=self.channel_label)
            if update_repodata:
                plugin.clear_cache()
            # Repositories coming from the DB may carry SSL credentials
            if repo_id is not None:
                keys = rhnSQL.fetchall_dict(""" select k1.key as ca_cert, k2.key as client_cert, k3.key as client_key from rhncontentsource cs inner join rhncontentsourcessl csssl on cs.id = csssl.content_source_id inner join rhncryptokey k1 on csssl.ssl_ca_cert_id = k1.id left outer join rhncryptokey k2 on csssl.ssl_client_cert_id = k2.id left outer join rhncryptokey k3 on csssl.ssl_client_key_id = k3.id where cs.id = :repo_id """, repo_id=int(repo_id))
                if keys:
                    ssl_set = get_single_ssl_set(keys, check_dates=self.check_ssl_dates)
                    if ssl_set:
                        plugin.set_ssl_options(ssl_set['ca_cert'], ssl_set['client_cert'], ssl_set['client_key'])
                    else:
                        raise ValueError("No valid SSL certificates were found for repository.")
            if not self.no_packages:
                ret = self.import_packages(plugin, repo_id, url)
                failed_packages += ret
                self.import_groups(plugin, url)
            if not self.no_errata:
                self.import_updates(plugin, url)
            # only for repos obtained from the DB
            if self.sync_kickstart and repo_label:
                try:
                    self.import_kickstart(plugin, repo_label)
                except:
                    # undo any partial kickstart DB changes before propagating
                    rhnSQL.rollback()
                    raise
        except Exception:
            # One repo failing must not stop the remaining repos
            e = sys.exc_info()[1]
            log2(0, 0, "ERROR: %s" % e, stream=sys.stderr)
            log2disk(0, "ERROR: %s" % e)
            # pylint: disable=W0104
            sync_error = -1
        if plugin is not None:
            plugin.clear_ssl_cache()
    # Update cache with package checksums
    rhnCache.set(checksum_cache_filename, self.checksum_cache)
    if self.regen:
        # queue repodata / errata-cache regeneration for this channel
        taskomatic.add_to_repodata_queue_for_channel_package_subscription([self.channel_label], [], "server.app.yumreposync")
        taskomatic.add_to_erratacache_queue(self.channel_label)
    self.update_date()
    rhnSQL.commit()
    # update permissions
    # if the directory exists update ownership only
    fileutils.createPath(os.path.join(CFG.MOUNT_POINT, 'rhn'))
    for root, dirs, files in os.walk(os.path.join(CFG.MOUNT_POINT, 'rhn')):
        for d in dirs:
            fileutils.setPermsPath(os.path.join(root, d), group='apache')
        for f in files:
            fileutils.setPermsPath(os.path.join(root, f), group='apache')
    elapsed_time = datetime.now() - start_time
    log(0, "Sync of channel completed in %s." % str(elapsed_time).split('.')[0])
    # if there is no global problems, but some packages weren't synced
    if sync_error == 0 and failed_packages > 0:
        sync_error = failed_packages
    return elapsed_time, sync_error
def import_kickstart(self, plug, repo_label):
    """Sync a kickstartable tree from the repository under CFG.MOUNT_POINT.

    Detects the tree through its treeinfo/.treeinfo file, registers (or
    updates) the rhnKickstartableTree row, walks the repository's
    directory listing breadth-first, downloads missing files and records
    each one in rhnKSTreeFile.

    :param plug: repository plugin instance used to fetch files/listings
    :param repo_label: repository label; sanitized into the tree label
    """
    ks_path = 'rhn/kickstart/'
    # Tree label: repo label with spaces -> '_' and disallowed chars removed
    ks_tree_label = re.sub(r'[^-_0-9A-Za-z@.]', '', repo_label.replace(' ', '_'))
    # Labels shorter than 4 chars are padded to stay valid
    if len(ks_tree_label) < 4:
        ks_tree_label += "_repo"
    # construct ks_path and check we already have this KS tree synced
    id_request = """ select id from rhnKickstartableTree where channel_id = :channel_id and label = :label """
    if self.org_id:
        ks_path += str(self.org_id) + '/' + ks_tree_label
        # Trees synced from external repositories are expected to have full path it database
        db_path = os.path.join(CFG.MOUNT_POINT, ks_path)
        row = rhnSQL.fetchone_dict(id_request + " and org_id = :org_id", channel_id=self.channel['id'], label=ks_tree_label, org_id=self.org_id)
    else:
        ks_path += ks_tree_label
        db_path = ks_path
        row = rhnSQL.fetchone_dict(id_request + " and org_id is NULL", channel_id=self.channel['id'], label=ks_tree_label)
    # Try both spellings of the treeinfo file; first parseable one wins
    treeinfo_path = ['treeinfo', '.treeinfo']
    treeinfo_parser = None
    for path in treeinfo_path:
        log(1, "Trying " + path)
        treeinfo = plug.get_file(path, os.path.join(plug.repo.basecachedir, plug.name))
        if treeinfo:
            try:
                treeinfo_parser = TreeInfoParser(treeinfo)
                break
            except TreeInfoError:
                pass
    if not treeinfo_parser:
        log(0, "Kickstartable tree not detected (no valid treeinfo file)")
        return
    # Derive the install type from the treeinfo family when not preset
    if self.ks_install_type is None:
        family = treeinfo_parser.get_family()
        if family == 'Fedora':
            self.ks_install_type = 'fedora18'
        elif family == 'CentOS':
            self.ks_install_type = 'rhel_' + treeinfo_parser.get_major_version()
        else:
            self.ks_install_type = 'generic_rpm'
    fileutils.createPath(os.path.join(CFG.MOUNT_POINT, ks_path))
    # Make sure images are included
    to_download = set()
    for repo_path in treeinfo_parser.get_images():
        local_path = os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)
        # TODO: better check
        if not os.path.exists(local_path) or self.force_kickstart:
            to_download.add(repo_path)
    if row:
        log(0, "Kickstartable tree %s already synced. Updating content..." % ks_tree_label)
        ks_id = row['id']
    else:
        # New tree: allocate an id and insert the rhnKickstartableTree row
        row = rhnSQL.fetchone_dict(""" select sequence_nextval('rhn_kstree_id_seq') as id from dual """)
        ks_id = row['id']
        rhnSQL.execute(""" insert into rhnKickstartableTree (id, org_id, label, base_path, channel_id, kstree_type, install_type, last_modified, created, modified) values (:id, :org_id, :label, :base_path, :channel_id, ( select id from rhnKSTreeType where label = :ks_tree_type), ( select id from rhnKSInstallType where label = :ks_install_type), current_timestamp, current_timestamp, current_timestamp)""", id=ks_id, org_id=self.org_id, label=ks_tree_label, base_path=db_path, channel_id=self.channel['id'], ks_tree_type=self.ks_tree_type, ks_install_type=self.ks_install_type)
        log(0, "Added new kickstartable tree %s. Downloading content..." % ks_tree_label)
    # Prepared statements to refresh the per-file rhnKSTreeFile rows
    insert_h = rhnSQL.prepare(""" insert into rhnKSTreeFile (kstree_id, relative_filename, checksum_id, file_size, last_modified, created, modified) values (:id, :path, lookup_checksum('sha256', :checksum), :st_size, epoch_seconds_to_timestamp_tz(:st_time), current_timestamp, current_timestamp) """)
    delete_h = rhnSQL.prepare(""" delete from rhnKSTreeFile where kstree_id = :id and relative_filename = :path """)
    # Downloading/Updating content of KS Tree
    # start from root dir
    is_root = True
    dirs_queue = ['']
    log(0, "Gathering all files in kickstart repository...")
    while len(dirs_queue) > 0:
        cur_dir_name = dirs_queue.pop(0)
        cur_dir_html = plug.get_file(cur_dir_name)
        if cur_dir_html is None:
            continue
        blacklist = None
        if is_root:
            # package dir is synced through the normal package path; skip it
            blacklist = [treeinfo_parser.get_package_dir() + '/']
            is_root = False
        parser = KSDirParser(cur_dir_html, blacklist)
        for ks_file in parser.get_content():
            repo_path = cur_dir_name + ks_file['name']
            # if this is a directory, just add a name into queue (like BFS algorithm)
            if ks_file['type'] == 'DIR':
                dirs_queue.append(repo_path)
                continue
            if not os.path.exists(os.path.join(CFG.MOUNT_POINT, ks_path, repo_path)) or self.force_kickstart:
                to_download.add(repo_path)
    if to_download:
        log(0, "Downloading %d kickstart files." % len(to_download))
        progress_bar = ProgressBarLogger("Downloading kickstarts:", len(to_download))
        downloader = ThreadedDownloader(force=self.force_kickstart)
        for item in to_download:
            params = {}
            plug.set_download_parameters(params, item, os.path.join(CFG.MOUNT_POINT, ks_path, item))
            downloader.add(params)
        downloader.set_log_obj(progress_bar)
        downloader.run()
        log2disk(0, "Download finished.")
        for item in to_download:
            st = os.stat(os.path.join(CFG.MOUNT_POINT, ks_path, item))
            # update entity about current file in a database
            delete_h.execute(id=ks_id, path=item)
            insert_h.execute(id=ks_id, path=item, checksum=getFileChecksum('sha256', os.path.join(CFG.MOUNT_POINT, ks_path, item)), st_size=st.st_size, st_time=st.st_mtime)
    else:
        log(0, "No new kickstart files to download.")
    # set permissions recursively
    # NOTE(review): the comment above suggests a permission pass that is
    # not present in this excerpt — confirm against the full file
    rhnSQL.commit()
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname, logging=0) return os.path.join(dirname, self.id) + '.xml'
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return "%s/suse_upgrade_paths.xml" % dirname
def _getFile(self, create=0): dirname = "%s/%s" % (self._getDir(create), self._hashID()) # Create the directoru if we have to if create and not os.path.exists(dirname): createPath(dirname) return "%s/%s%s" % (dirname, self.id, self._file_suffix)
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return "%s/product_names.xml" % dirname
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return "%s/scc_repositories.xml" % dirname
def _getFile(self, create=0): dirname = "%s/%s" % (self._getDir(create), self.channel) if create and not os.path.isdir(dirname): createPath(dirname) return os.path.join(dirname, self._file_name())
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return "%s/channel_families.xml" % dirname
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return "%s/support_info.xml" % dirname
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname, logging=0) return "%s/blacklists.xml" % dirname
def _getFile(self, create=0): dirname = self._getDir(create) if create and not os.path.isdir(dirname): createPath(dirname) return os.path.join(dirname, self.filename)