def upload_rpm(rpm_path, repoid, connector):
    """Upload an rpm into pulp.

    rpm_path: path to an rpm
    repoid: id of the pulp repo to upload the rpm into
    connector: the connector to use for interacting with pulp

    Returns the 'packageid' pulp assigned to the uploaded package.
    """
    # All header parsing is delegated to rpm_info(); the unused
    # TransactionSet (and the unused version/release/epoch/arch locals)
    # from the previous revision have been dropped.
    info = rpm_info(rpm_path)
    pkg_name = info['name']
    nvrea = info['nvrea']
    cksum = info['cksum']
    size = info['size']
    package_basename = info['package_basename']

    juicer.utils.Log.log_notice("Expected amount to seek: %s (package size by os.path.getsize)" % size)

    # initiate upload
    upload = juicer.utils.Upload.Upload(package_basename, cksum, size, repoid, connector)

    # create a statusbar
    pbar = ProgressBar(size)

    # Read the rpm in chunks and push each chunk to pulp at its byte
    # offset. 'with' guarantees the fd is closed even if an append
    # call raises mid-transfer (previous revision leaked it on error).
    total_seeked = 0
    with open(rpm_path, 'rb') as rpm_fd:
        while total_seeked < size:
            rpm_data = rpm_fd.read(Constants.UPLOAD_AT_ONCE)
            last_offset = total_seeked
            total_seeked += len(rpm_data)
            juicer.utils.Log.log_notice("Seeked %s data... (total seeked: %s)" % (len(rpm_data), total_seeked))
            upload_code = upload.append(fdata=rpm_data, offset=last_offset)
            if upload_code != Constants.PULP_PUT_OK:
                juicer.utils.Log.log_error("Upload failed.")
    pbar.finish()
    juicer.utils.Log.log_notice("Seeked total data: %s" % total_seeked)

    # finalize upload
    rpm_id = upload.import_upload(nvrea=nvrea, rpm_name=pkg_name)
    juicer.utils.Log.log_debug("RPM upload complete. New 'packageid': %s" % rpm_id)

    # clean up working dir
    upload.clean_upload()

    return rpm_id
def upload_rpm(rpm_path, repoid, connector):
    """Upload an rpm into pulp.

    rpm_path: path to an rpm
    repoid: id of the pulp repo to upload the rpm into
    connector: the connector to use for interacting with pulp

    Returns the 'packageid' pulp assigned to the uploaded package.
    """
    ts = rpm.TransactionSet()
    # don't require valid signatures just to read the header
    ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)

    rpm_fd = open(rpm_path, 'rb')
    try:
        pkg = ts.hdrFromFdno(rpm_fd)
        pkg_name = pkg['name']
        version = pkg['version']
        release = pkg['release']
        # NOTE(review): epoch is reported as 0 regardless of the header
        # contents -- preserved for compatibility; confirm whether
        # pkg['epoch'] should be used instead.
        epoch = 0
        arch = pkg['arch']
        nvrea = tuple((pkg_name, version, release, epoch, arch))

        # BUG FIX: the previous revision computed
        # hashlib.md5(rpm_path) -- an md5 of the *path string*, not of
        # the package data. Hash the file contents, in chunks so large
        # rpms don't need to fit in memory.
        md5 = hashlib.md5()
        with open(rpm_path, 'rb') as cksum_fd:
            chunk = cksum_fd.read(65536)
            while chunk:
                md5.update(chunk)
                chunk = cksum_fd.read(65536)
        cksum = md5.hexdigest()

        size = os.path.getsize(rpm_path)
        package_basename = os.path.basename(rpm_path)

        juicer.utils.Log.log_notice("Expected amount to seek: %s (package size by os.path.getsize)" % size)

        # initiate upload
        upload = juicer.utils.Upload.Upload(package_basename, cksum, size, repoid, connector)

        # create a statusbar
        pbar = ProgressBar(size)

        # Read the rpm in chunks and push each chunk to pulp at its
        # byte offset. Rewind first: hdrFromFdno advanced the fd.
        total_seeked = 0
        rpm_fd.seek(0)
        while total_seeked < size:
            rpm_data = rpm_fd.read(Constants.UPLOAD_AT_ONCE)
            last_offset = total_seeked
            total_seeked += len(rpm_data)
            juicer.utils.Log.log_notice("Seeked %s data... (total seeked: %s)" % (len(rpm_data), total_seeked))
            upload_code = upload.append(fdata=rpm_data, offset=last_offset)
            if upload_code != Constants.PULP_PUT_OK:
                juicer.utils.Log.log_error("Upload failed.")
        pbar.finish()
    finally:
        # previous revision leaked the fd if anything above raised
        rpm_fd.close()
    juicer.utils.Log.log_notice("Seeked total data: %s" % total_seeked)

    # finalize upload
    rpm_id = upload.import_upload(nvrea=nvrea, rpm_name=pkg_name)
    juicer.utils.Log.log_debug("RPM upload complete. New 'packageid': %s" % rpm_id)

    # clean up working dir
    upload.clean_upload()

    return rpm_id
def upload_rpm(rpm_path, repoid, connector, callback=None):
    """Upload an rpm into pulp.

    rpm_path: path to an rpm
    repoid: id of the pulp repo to upload the rpm into
    connector: the connector to use for interacting with pulp
    callback: Optional callback to call after an RPM is
    uploaded. Callback should accept one argument, the name of the RPM
    which was uploaded

    Returns the 'packageid' pulp assigned to the uploaded package.
    """
    ts = rpm.TransactionSet()
    # don't require valid signatures just to read the header
    ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)

    info = rpm_info(rpm_path)
    pkg_name = info["name"]
    nvrea = info["nvrea"]
    cksum = info["cksum"]
    size = info["size"]
    package_basename = info["package_basename"]

    juicer.utils.Log.log_notice("Expected amount to seek: %s (package size by os.path.getsize)" % size)

    # initiate upload
    upload = juicer.utils.Upload.Upload(package_basename, cksum, size, repoid, connector)

    # create a statusbar
    pbar = ProgressBar(size)

    # Read the rpm in chunks and push each chunk to pulp at its byte
    # offset. 'with' guarantees the fd is closed even if an append
    # call raises mid-transfer (previous revision leaked it on error).
    total_seeked = 0
    with open(rpm_path, "rb") as rpm_fd:
        while total_seeked < size:
            rpm_data = rpm_fd.read(Constants.UPLOAD_AT_ONCE)
            last_offset = total_seeked
            total_seeked += len(rpm_data)
            juicer.utils.Log.log_notice("Seeked %s data... (total seeked: %s)" % (len(rpm_data), total_seeked))
            upload_code = upload.append(fdata=rpm_data, offset=last_offset)
            if upload_code != Constants.PULP_PUT_OK:
                juicer.utils.Log.log_error("Upload failed.")
            pbar.update(len(rpm_data))
    pbar.finish()
    juicer.utils.Log.log_notice("Seeked total data: %s" % total_seeked)

    # finalize upload
    rpm_id = upload.import_upload(nvrea=nvrea, rpm_name=pkg_name)
    juicer.utils.Log.log_debug("RPM upload complete. New 'packageid': %s" % rpm_id)

    # clean up working dir
    upload.clean_upload()

    # Run callbacks?
    if callback:
        try:
            # fixed typo in log message: "callack" -> "callback"
            juicer.utils.Log.log_debug("Calling upload callback: %s" % str(callback))
            callback(pkg_name)
        except Exception:
            # best-effort: a failing callback must not abort the upload
            juicer.utils.Log.log_error("Exception raised in callback: %s", str(callback))

    return rpm_id
def upload_rpm(rpm_path, repoid, connector, callback=None):
    """Upload an rpm into pulp.

    rpm_path: path to an rpm
    repoid: id of the pulp repo to upload the rpm into
    connector: the connector to use for interacting with pulp
    callback: Optional callback to call after an RPM is
    uploaded. Callback should accept one argument, the name of the RPM
    which was uploaded

    Returns the 'packageid' pulp assigned to the uploaded package.
    """
    ts = rpm.TransactionSet()
    # don't require valid signatures just to read the header
    ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)

    info = rpm_info(rpm_path)
    pkg_name = info['name']
    nvrea = info['nvrea']
    cksum = info['cksum']
    size = info['size']
    package_basename = info['package_basename']

    juicer.utils.Log.log_notice(
        "Expected amount to seek: %s (package size by os.path.getsize)" % size)

    # initiate upload
    upload = juicer.utils.Upload.Upload(package_basename, cksum, size,
                                        repoid, connector)

    # create a statusbar
    pbar = ProgressBar(size)

    # Read the rpm in chunks and push each chunk to pulp at its byte
    # offset. 'with' guarantees the fd is closed even if an append
    # call raises mid-transfer (previous revision leaked it on error).
    total_seeked = 0
    with open(rpm_path, 'rb') as rpm_fd:
        while total_seeked < size:
            rpm_data = rpm_fd.read(Constants.UPLOAD_AT_ONCE)
            last_offset = total_seeked
            total_seeked += len(rpm_data)
            juicer.utils.Log.log_notice("Seeked %s data... (total seeked: %s)" % (len(rpm_data), total_seeked))
            upload_code = upload.append(fdata=rpm_data, offset=last_offset)
            if upload_code != Constants.PULP_PUT_OK:
                juicer.utils.Log.log_error("Upload failed.")
            pbar.update(len(rpm_data))
    pbar.finish()
    juicer.utils.Log.log_notice("Seeked total data: %s" % total_seeked)

    # finalize upload
    rpm_id = upload.import_upload(nvrea=nvrea, rpm_name=pkg_name)
    juicer.utils.Log.log_debug("RPM upload complete. New 'packageid': %s" % rpm_id)

    # clean up working dir
    upload.clean_upload()

    # Run callbacks?
    if callback:
        try:
            # fixed typo in log message: "callack" -> "callback"
            juicer.utils.Log.log_debug("Calling upload callback: %s" % str(callback))
            callback(pkg_name)
        except Exception:
            # best-effort: a failing callback must not abort the upload
            juicer.utils.Log.log_error("Exception raised in callback: %s", str(callback))

    return rpm_id
def export_repos(self, envs=None):
    """Dump JuicerRepo() objects for all repos in all environments.

    envs: list of environment names to restrict the export to
    (defaults to an empty list, i.e. whatever self.list_repos
    returns for no environments).

    Note that this has undefined results should a repo exist with
    different configurations in different environments.

    Returns a list of JuicerRepo() objects sorted by repo name.
    """
    # Mutable default argument (envs=[]) replaced with the None
    # sentinel idiom; behavior is unchanged since envs is never
    # mutated here.
    all_envs = envs if envs is not None else []
    juicer.utils.Log.log_notice("Only exporting repos in environment(s): %s",
                                ", ".join(all_envs))
    all_pulp_repo_names = self.list_repos(envs=all_envs)
    all_pulp_repo_names_uniqued = set()

    # Track name of all processed repos. Update when we've found
    # all environments a PulpRepo lives in.
    repos_processed = []

    for env, repos in all_pulp_repo_names.iteritems():
        juicer.utils.Log.log_debug("Uniqued environment: %s with %s repos",
                                   env, len(repos))
        all_pulp_repo_names_uniqued.update(set(repos))

    # BUG FIX: previously num_repos accumulated the running size of
    # the uniqued set on every loop iteration, over-counting whenever
    # a repo exists in more than one environment. The progress-bar
    # total is simply the number of unique repos.
    num_repos = len(all_pulp_repo_names_uniqued)

    widgets = [
        "Exporting: ",
        progressbar.Percentage(),
        " ",
        "(",
        progressbar.SimpleProgress(),
        ") ",
        progressbar.ETA()
    ]

    progress_bar = JuiceBar(num_repos, widgets)

    # Hacky way to get around not easily being able to pass
    # multiple arguments to a function in a multiprocessing pool
    lookup_objects = []
    for repo in all_pulp_repo_names_uniqued:
        lookup_args = juicer.admin.ThreaddedQuery.LookupObject()
        setattr(lookup_args, 'progress_bar', progress_bar)
        setattr(lookup_args, 'all_pulp_repo_names', all_pulp_repo_names)
        setattr(lookup_args, 'all_envs', all_envs)
        setattr(lookup_args, 'ja', self)
        setattr(lookup_args, 'pulp_repo', repo)
        setattr(lookup_args, 'repos_processed', repos_processed)
        lookup_objects.append(lookup_args)

    # TODO: Add the serial/concurrent logic here
    # Make our thread pool before the try block so 'p' is always
    # bound when the KeyboardInterrupt handler runs (previously an
    # interrupt during pool creation raised UnboundLocalError).
    p = ThreadPool()
    try:
        # Get an AsyncResult object
        r = p.map_async(juicer.admin.ThreaddedQuery.concurrent_pulp_lookup, lookup_objects)
        # TODO: We should probably use p.apply_async here to avoid the crappy lookup_objects hack
        while not r.ready():
            r.wait(1)
    except KeyboardInterrupt:
        juicer.utils.Log.log_error("User pressed ^C during repo export")
        juicer.utils.Log.log_error("Terminating %s worker threads and then exiting",
                                   len(p._pool))
        # Prevents any more tasks from being submitted to the
        # pool. Once all the tasks have been completed the worker
        # threads will exit.
        #p.close()
        p.terminate()
        p.join()
    # XXX: End serial/concurrent logic

    progress_bar.finish()

    juicer_repos = [pr.to_juicer_repo() for pr in repos_processed]
    return sorted(juicer_repos, key=lambda d: d['name'].lower())