def save_fileless_units(self, file_handle, tag, process_func, mutable_type=False, additive_type=False):
    """
    Generic method for saving units parsed from a repo metadata file where
    the units do not have files to store on disk. For example, groups.

    :param file_handle: open file-like object containing metadata
    :type  file_handle: file
    :param tag: XML tag that identifies each unit
    :type  tag: basestring
    :param process_func: function that processes each unit and returns a dict
                         representing that unit's attribute names and values.
                         The function must take one parameter, which is an
                         ElementTree instance
    :type  process_func: function
    :param mutable_type: iff True, each unit will be saved regardless of
                         whether it already exists in the repo. this is useful
                         for units like group and category which don't have a
                         version, but could change
    :type  mutable_type: bool
    :param additive_type: iff True, units will be updated instead of replaced.
                          For example, if you wanted to save an errata and
                          concatenate its package list with an existing
                          errata, you'd set this. Note that mutable_type and
                          additive_type are mutually exclusive.
    :type  additive_type: bool

    :raises PulpCodedException: if both mutable_type and additive_type are set
    """
    if mutable_type and additive_type:
        raise PulpCodedException(message="The mutable_type and additive_type arguments for "
                                         "this method are mutually exclusive.")
    # iterate through the file and determine what we want to have
    package_info_generator = packages.package_list_generator(file_handle, tag, process_func)
    # if units aren't mutable, we don't need to attempt saving units that
    # we already have
    if not mutable_type and not additive_type:
        wanted = (model.as_named_tuple for model in package_info_generator)
        # given what we want, filter out what we already have
        to_save = existing.check_repo(wanted, self.sync_conduit.get_units)
        # rewind, iterate again through the file, and save what we need
        file_handle.seek(0)
        all_packages = packages.package_list_generator(file_handle, tag, process_func)
        package_info_generator = (model for model in all_packages
                                  if model.as_named_tuple in to_save)
    for model in package_info_generator:
        unit = self.sync_conduit.init_unit(model.TYPE, model.unit_key, model.metadata, None)
        if additive_type:
            # merge with any unit already in the DB instead of overwriting it
            existing_unit = self.sync_conduit.find_unit_by_unit_key(model.TYPE, model.unit_key)
            if existing_unit:
                unit = self._concatenate_units(existing_unit, unit)
        self.sync_conduit.save_unit(unit)
def download(self, metadata_files, rpms_to_download, drpms_to_download):
    """
    Actually download the requested RPMs and DRPMs. This method iterates over
    the appropriate metadata file and downloads those items which are present
    in the corresponding set. It also checks for the RPMs and DRPMs which
    exist in other repositories before downloading them. If they are already
    downloaded, we skip the download and just associate them to the given
    repository.

    :param metadata_files: populated instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    :param rpms_to_download: set of RPM.NAMEDTUPLEs
    :type  rpms_to_download: set
    :param drpms_to_download: set of DRPM.NAMEDTUPLEs
    :type  drpms_to_download: set

    :rtype: pulp.plugins.model.SyncReport
    """
    # TODO: probably should make this more generic
    event_listener = ContentListener(self.sync_conduit, self.progress_status, self.call_config,
                                     metadata_files)
    # download RPMs listed in the primary metadata
    primary_file_handle = metadata_files.get_metadata_file_handle(primary.METADATA_FILE_NAME)
    try:
        package_model_generator = packages.package_list_generator(
            primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element)
        units_to_download = self._filtered_unit_generator(package_model_generator,
                                                          rpms_to_download)
        download_wrapper = alternate.Packages(self.sync_feed, self.nectar_config,
                                              units_to_download, self.tmp_dir, event_listener)
        # allow the downloader to be accessed by the cancel method if necessary
        self.downloader = download_wrapper.downloader
        _logger.info(_('Downloading %(num)s RPMs.') % {'num': len(rpms_to_download)})
        download_wrapper.download_packages()
        self.downloader = None
    finally:
        primary_file_handle.close()

    # download DRPMs; presto metadata is optional, so the handle may be None
    presto_file_handle = metadata_files.get_metadata_file_handle(presto.METADATA_FILE_NAME)
    if presto_file_handle:
        try:
            package_model_generator = packages.package_list_generator(
                presto_file_handle, presto.PACKAGE_TAG, presto.process_package_element)
            units_to_download = self._filtered_unit_generator(package_model_generator,
                                                              drpms_to_download)
            download_wrapper = packages.Packages(self.sync_feed, self.nectar_config,
                                                 units_to_download, self.tmp_dir,
                                                 event_listener)
            # allow the downloader to be accessed by the cancel method if necessary
            self.downloader = download_wrapper.downloader
            _logger.info(_('Downloading %(num)s DRPMs.') % {'num': len(drpms_to_download)})
            download_wrapper.download_packages()
            self.downloader = None
        finally:
            presto_file_handle.close()

    report = self.sync_conduit.build_success_report({}, {})
    return report
def test_updateinfo_unit_metadata(self):
    """A parsed erratum is serialized into updateinfo XML with the expected content."""
    out_path = os.path.join(self.metadata_file_dir, REPO_DATA_DIR_NAME,
                            UPDATE_INFO_XML_FILE_NAME)
    source = open(os.path.join(DATA_DIR, 'updateinfo.xml'), 'r')
    erratum_unit = next(packages.package_list_generator(
        source, 'update', updateinfo.process_package_element))
    # just checking
    self.assertEqual(erratum_unit.unit_key['errata_id'], 'RHEA-2010:9999')

    context = UpdateinfoXMLFileContext(self.metadata_file_dir)
    context._open_metadata_file_handle()
    context.add_unit_metadata(erratum_unit)
    context._close_metadata_file_handle()
    self.assertNotEqual(os.path.getsize(out_path), 0)

    reader = gzip.open(out_path, 'r')
    content = reader.read()
    reader.close()

    # each fragment must occur exactly the given number of times in the output
    expected_counts = [
        ('from="*****@*****.**"', 1),
        ('status="final"', 1),
        ('type="enhancements"', 1),
        ('version="1"', 1),
        ('<id>RHEA-2010:9999</id>', 1),
        ('<collection short="F13PTP">', 1),
        ('<package', 2),
        ('<sum type="md5">f3c197a29d9b66c5b65c5d62b25db5b4</sum>', 1),
    ]
    for fragment, count in expected_counts:
        self.assertEqual(content.count(fragment), count)
def _decide_rpms_to_download(self, metadata_files):
    """
    Decide which RPMs should be downloaded based on the repo metadata and on
    the importer config.

    :param metadata_files: instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

    :return: tuple of (set(RPM.NAMEDTUPLEs), number of RPMs, total size in bytes)
    :rtype:  tuple
    """
    if ids.TYPE_ID_RPM in self.config.get(constants.CONFIG_SKIP, []):
        _logger.debug('skipping RPM sync')
        return set(), 0, 0

    primary_file_handle = metadata_files.get_metadata_file_handle(
        primary.METADATA_FILE_NAME)
    try:
        # scan through all the metadata to decide which packages to download
        package_info_generator = packages.package_list_generator(
            primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element)
        wanted = self._identify_wanted_versions(package_info_generator)
        # check for the units that are not in the repo, but exist on the server
        # and associate them to the repo
        to_download = existing.check_all_and_associate(
            wanted.iterkeys(), self.conduit, self.download_deferred)
        count = len(to_download)
        # total download size is the sum of the per-unit sizes recorded in `wanted`
        size = 0
        for unit in to_download:
            size += wanted[unit]
        return to_download, count, size
    finally:
        primary_file_handle.close()
def _decide_rpms_to_download(self, metadata_files):
    """
    Decide which RPMs should be downloaded based on the repo metadata and on
    the importer config.

    :param metadata_files: instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

    :return: tuple of (set(RPM.NAMEDTUPLEs), number of RPMs, total size in bytes)
    :rtype:  tuple
    """
    if ids.TYPE_ID_RPM in self.config.get(constants.CONFIG_SKIP, []):
        _logger.debug('skipping RPM sync')
        return set(), 0, 0

    primary_file_handle = metadata_files.get_metadata_file_handle(primary.METADATA_FILE_NAME)
    try:
        # scan through all the metadata to decide which packages to download
        package_info_generator = packages.package_list_generator(
            primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element)
        wanted = self._identify_wanted_versions(package_info_generator)
        # check for the units that are not in the repo, but exist on the server
        # and associate them to the repo
        to_download = existing.check_all_and_associate(
            wanted.iterkeys(), self.conduit, self.download_deferred)
        count = len(to_download)
        # total download size is the sum of the per-unit sizes recorded in `wanted`
        size = 0
        for unit in to_download:
            size += wanted[unit]
        return to_download, count, size
    finally:
        primary_file_handle.close()
def test_rhel6_real_data(self):
    """Parse a real RHEL 6 erratum file and spot-check the resulting model."""
    data_path = os.path.join(os.path.dirname(__file__),
                             "../data/RHBA-2010-0836.erratum.xml")
    with open(data_path) as f:
        errata = list(packages.package_list_generator(
            f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element))

    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))

    # top-level erratum fields
    self.assertEqual(erratum.metadata.get("rights"), "Copyright 2010 Red Hat Inc")
    self.assertTrue(erratum.metadata.get("summary") is not None)
    self.assertEqual(erratum.id, "RHBA-2010:0836")
    self.assertEqual(erratum.metadata.get("type"), "bugfix")
    self.assertEqual(erratum.metadata.get("updated"), "2010-11-10 00:00:00")
    self.assertEqual(erratum.metadata.get("reboot_suggested"), False)
    self.assertEqual(erratum.metadata.get("severity"), "")

    # every referenced package must round-trip through the RPM model
    rpms = erratum.rpm_search_dicts
    self.assertEqual(len(rpms), 4)
    for rpm in rpms:
        # make sure all of the correct keys are present
        model = models.RPM.from_package_info(rpm)
        self.assertEqual(model.checksumtype, "sha256")
        self.assertTrue(len(model.checksum) > 0)
        self.assertTrue(model.name.startswith("NetworkManager"))
        self.assertEqual(model.version, "0.8.1")
        self.assertEqual(model.release, "5.el6_0.1")
def test_rhel6_real_data(self):
    """Parse a real RHEL 6 erratum file and verify the parsed Errata model."""
    with open(os.path.join(os.path.dirname(__file__),
                           '../data/RHBA-2010-0836.erratum.xml')) as f:
        errata = packages.package_list_generator(f,
                                                 updateinfo.PACKAGE_TAG,
                                                 updateinfo.process_package_element)
        # materialize while the file is still open; the generator reads lazily
        errata = list(errata)
    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))
    self.assertEqual(erratum.metadata.get('rights'), 'Copyright 2010 Red Hat Inc')
    self.assertTrue(erratum.metadata.get('summary') is not None)
    self.assertEqual(erratum.id, 'RHBA-2010:0836')
    self.assertEqual(erratum.metadata.get('type'), 'bugfix')
    self.assertEqual(erratum.metadata.get('updated'), '2010-11-10 00:00:00')
    self.assertEqual(erratum.metadata.get('reboot_suggested'), False)
    self.assertEqual(erratum.metadata.get('severity'), '')
    rpms = erratum.rpm_search_dicts
    self.assertEqual(len(rpms), 4)
    for rpm in rpms:
        # make sure all of the correct keys are present
        model = models.RPM.from_package_info(rpm)
        self.assertEqual(model.checksumtype, 'sha256')
        self.assertTrue(len(model.checksum) > 0)
        self.assertTrue(model.name.startswith('NetworkManager'))
        self.assertEqual(model.version, '0.8.1')
        self.assertEqual(model.release, '5.el6_0.1')
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read access
    to each unit's data.

    :raises PulpCodedException: if there is some inconsistency in metadata
    """
    package_count = {}
    for filename, tag, process_func in (
        (filelists.METADATA_FILE_NAME, filelists.PACKAGE_TAG,
         filelists.process_package_element),
        (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        with contextlib.closing(self.get_metadata_file_handle(filename)) as xml_file_handle:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, '%s.db' % filename)
            # always a New file, and open with Fast writing mode.
            with contextlib.closing(gdbm.open(db_filename, 'nf')) as db_file_handle:
                for element in generator:
                    utils.strip_ns(element)
                    # NOTE(review): pkgid is stored as a template placeholder;
                    # presumably substituted with the real value downstream — confirm
                    element.attrib['pkgid'] = models.RpmBase.PKGID_TEMPLATE
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                db_file_handle.sync()
                package_count[filename] = len(db_file_handle)
        self.dbs[filename] = db_filename
    # filelists.xml and other.xml must describe the same package set
    if package_count[filelists.METADATA_FILE_NAME] != package_count[other.METADATA_FILE_NAME]:
        reason = ('metadata is specified for different set of packages in filelists.xml'
                  ' and in other.xml')
        raise PulpCodedException(error_code=error_codes.RPM1015, reason=reason)
    self.rpm_count = package_count[filelists.METADATA_FILE_NAME]
def test_updateinfo_unit_metadata(self):
    """A parsed erratum is serialized into updateinfo XML with the expected content."""
    out_path = os.path.join(self.metadata_file_dir, REPO_DATA_DIR_NAME,
                            UPDATE_INFO_XML_FILE_NAME)
    source = open(os.path.join(DATA_DIR, 'updateinfo.xml'), 'r')
    erratum_unit = next(packages.package_list_generator(
        source, 'update', updateinfo.process_package_element))
    # just checking
    self.assertEqual(erratum_unit.unit_key['id'], 'RHEA-2010:9999')

    context = UpdateinfoXMLFileContext(self.metadata_file_dir)
    context._open_metadata_file_handle()
    context.add_unit_metadata(erratum_unit)
    context._close_metadata_file_handle()
    self.assertNotEqual(os.path.getsize(out_path), 0)

    reader = gzip.open(out_path, 'r')
    content = reader.read()
    reader.close()

    # each fragment must occur exactly the given number of times in the output
    expected_counts = [
        ('from="*****@*****.**"', 1),
        ('status="final"', 1),
        ('type="enhancements"', 1),
        ('version="1"', 1),
        ('<id>RHEA-2010:9999</id>', 1),
        ('<collection short="F13PTP">', 1),
        ('<package', 2),
        ('<sum type="md5">f3c197a29d9b66c5b65c5d62b25db5b4</sum>', 1),
    ]
    for fragment, count in expected_counts:
        self.assertEqual(content.count(fragment), count)
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read
    access to each unit's data.
    """
    # local import so this block is self-contained; other variants of this
    # method in the codebase already rely on contextlib.closing
    import contextlib
    for filename, tag, process_func in (
        (filelists.METADATA_FILE_NAME, filelists.PACKAGE_TAG,
         filelists.process_package_element),
        (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        # contextlib.closing replaces the original nested try/finally blocks:
        # both handles are still guaranteed to close on any exit path, and the
        # style now matches the sibling implementations of this method.
        with contextlib.closing(self.get_metadata_file_handle(filename)) as xml_file_handle:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, '%s.db' % filename)
            # always a New file, and open with Fast writing mode.
            with contextlib.closing(gdbm.open(db_filename, 'nf')) as db_file_handle:
                for element in generator:
                    utils.strip_ns(element)
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                db_file_handle.sync()
        self.dbs[filename] = db_filename
def main(): try: update_info_file_path = sys.argv[1] output_directory = sys.argv[2] except IndexError: print 'Usage: %s <update info file path> <output directory>' return os.EX_NOINPUT update_info_file_handle = open(update_info_file_path, 'r') package_list_generator = packages.package_list_generator( update_info_file_handle, 'update', updateinfo.process_package_element) with metadata.UpdateinfoXMLFileContext( output_directory) as update_info_file_context: try: for erratum_unit in package_list_generator: #pprint(erratum_unit.metadata) update_info_file_context.add_unit_metadata(erratum_unit) except: traceback.print_exc(file=sys.stderr) return os.EX_SOFTWARE return os.EX_OK
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read access
    to each unit's data.
    """
    for filename, tag, process_func in (
        (filelists.METADATA_FILE_NAME, filelists.PACKAGE_TAG,
         filelists.process_package_element),
        (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        with contextlib.closing(self.get_metadata_file_handle(filename)) as xml_file_handle:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, '%s.db' % filename)
            # always a New file, and open with Fast writing mode.
            with contextlib.closing(gdbm.open(db_filename, 'nf')) as db_file_handle:
                for element in generator:
                    utils.strip_ns(element)
                    # NOTE(review): pkgid is stored as a template placeholder;
                    # presumably substituted with the real value downstream — confirm
                    element.attrib['pkgid'] = models.RpmBase.PKGID_TEMPLATE
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                db_file_handle.sync()
        self.dbs[filename] = db_filename
def main(): try: update_info_file_path = sys.argv[1] output_directory = sys.argv[2] except IndexError: print "Usage: %s <update info file path> <output directory>" return os.EX_NOINPUT update_info_file_handle = open(update_info_file_path, "r") package_list_generator = packages.package_list_generator( update_info_file_handle, "update", updateinfo.process_package_element ) with metadata.UpdateinfoXMLFileContext(output_directory) as update_info_file_context: try: for erratum_unit in package_list_generator: # pprint(erratum_unit.metadata) update_info_file_context.add_unit_metadata(erratum_unit) except: traceback.print_exc(file=sys.stderr) return os.EX_SOFTWARE return os.EX_OK
def get_remote_units(file_function, tag, process_func):
    """
    Return a set of units (as named tuples) that are in the remote repository.

    :param file_function: method that returns a file handle for the units
                          file on disk
    :type  file_function: function
    :param tag: name of the XML tag that identifies each object in the XML file
    :type  tag: basestring
    :param process_func: function that takes one argument, of type
                         xml.etree.ElementTree.Element, or the cElementTree
                         equivalent, and returns a dictionary containing
                         metadata about the unit
    :type  process_func: function

    :return: set of named tuples representing units
    :rtype:  set
    """
    file_handle = file_function()
    # no metadata file available means nothing is known to be remote
    if file_handle is None:
        return set()

    remote_named_tuples = set()
    try:
        for unit_model in packages.package_list_generator(file_handle, tag, process_func):
            remote_named_tuples.add(unit_model.as_named_tuple)
    finally:
        file_handle.close()
    return remote_named_tuples
def _get_and_save_file_units(filename, processing_function, tag, conduit, repo):
    """
    Given a comps.xml file, this method decides which groups/categories to get
    and saves the parsed units.

    :param filename: open file-like object containing metadata
    :type  filename: file
    :param processing_function: method to use for generating the units
    :type  processing_function: function
    :param tag: XML tag that identifies each unit
    :type  tag: str
    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit
    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository
    """
    # bind the repo id so the parser stamps each unit with its repository
    bound_processor = functools.partial(processing_function, repo.repo_id)
    for unit in packages.package_list_generator(filename, tag, bound_processor):
        try:
            unit.save()
        except NotUniqueError:
            # the unit already exists; fetch the canonical copy and associate that
            unit = unit.__class__.objects.filter(**unit.unit_key).first()
        repo_controller.associate_single_unit(repo, unit)
def _get_and_save_file_units(filename, processing_function, tag, conduit, repo):
    """
    Given a comps.xml file, this method decides which groups/categories to get
    and saves the parsed units.

    :param filename: open file-like object containing metadata
    :type  filename: file
    :param processing_function: method to use for generating the units
    :type  processing_function: function
    :param tag: XML tag that identifies each unit
    :type  tag: str
    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit
    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository
    """
    repo_id = repo.repo_id
    # bind the repo id so the parser stamps each unit with its repository
    process_func = functools.partial(processing_function, repo_id)
    package_info_generator = packages.package_list_generator(filename, tag, process_func)
    for model in package_info_generator:
        try:
            model.save()
        except NotUniqueError:
            # the unit already exists; fetch the canonical copy and associate that
            model = model.__class__.objects.filter(**model.unit_key).first()
        repo_controller.associate_single_unit(repo, model)
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read access
    to each unit's data.
    """
    for filename, tag, process_func in (
        (filelists.METADATA_FILE_NAME, filelists.PACKAGE_TAG,
         filelists.process_package_element),
        (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        xml_file_handle = self.get_metadata_file_handle(filename)
        try:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, "%s.db" % filename)
            # always a New file, and open with Fast writing mode.
            db_file_handle = gdbm.open(db_filename, "nf")
            try:
                for element in generator:
                    utils.strip_ns(element)
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                # flush writes before closing so the db is complete on disk
                db_file_handle.sync()
            finally:
                db_file_handle.close()
        finally:
            xml_file_handle.close()
        self.dbs[filename] = db_filename
def _decide_drpms_to_download(self, metadata_files):
    """
    Decide which DRPMs should be downloaded based on the repo metadata and on
    the importer config.

    :param metadata_files: instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

    :return: tuple of (set(DRPM.NAMEDTUPLEs), number of DRPMs, total size in bytes)
    :rtype:  tuple
    """
    if models.DRPM.TYPE in self.call_config.get(constants.CONFIG_SKIP, []):
        _LOGGER.debug('skipping DRPM sync')
        return set(), 0, 0

    presto_file_handle = metadata_files.get_metadata_file_handle(presto.METADATA_FILE_NAME)
    # presto metadata is optional; a repo without DRPMs has no presto file
    if presto_file_handle:
        try:
            package_info_generator = packages.package_list_generator(presto_file_handle,
                                                                     presto.PACKAGE_TAG,
                                                                     presto.process_package_element)
            wanted = self._identify_wanted_versions(package_info_generator)
            to_download = existing.check_repo(wanted.iterkeys(), self.sync_conduit.get_units)
            count = len(to_download)
            # total download size is the sum of per-unit sizes in `wanted`
            size = 0
            for unit in to_download:
                size += wanted[unit]
        finally:
            presto_file_handle.close()
    else:
        to_download = set()
        count = 0
        size = 0
    return to_download, count, size
def test_centos6_real_data(self):
    """Both groups in the CentOS 6 comps data parse into PackageGroup models."""
    parsed = list(packages.package_list_generator(
        StringIO(CENTOS6_COMPS_XML), group.GROUP_TAG, self.process_group))
    self.assertEqual(len(parsed), 2)
    for pkg_group in parsed:
        self.assertTrue(isinstance(pkg_group, models.PackageGroup))
        self.assertEqual(pkg_group.repo_id, "repo1")
def download_drpms(self, metadata_files, drpms_to_download, url):
    """
    Actually download the requested DRPMs. This method iterates over the
    appropriate metadata file and downloads those items which are present in
    the corresponding set. It also checks for the DRPMs which exist in other
    repositories before downloading them. If they are already downloaded, we
    skip the download and just associate them to the given repository.

    Multiple options for deltainfo files depending on the distribution
    so we have to go through all of them to get all the DRPMs

    :param metadata_files: populated instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    :param drpms_to_download: set of DRPM.NAMEDTUPLEs
    :type  drpms_to_download: set
    :param url: current URL we should sync
    :type: str
    """
    event_listener = DRPMListener(self, metadata_files)
    for presto_file_name in presto.METADATA_FILE_NAMES:
        presto_file_handle = metadata_files.get_metadata_file_handle(presto_file_name)
        if presto_file_handle:
            try:
                package_model_generator = packages.package_list_generator(
                    presto_file_handle, presto.PACKAGE_TAG, presto.process_package_element)
                units_to_download = self._filtered_unit_generator(package_model_generator,
                                                                  drpms_to_download)
                # Wrapped in a generator that adds entries to
                # the deferred (Lazy) catalog.
                units_to_download = self.catalog_generator(url, units_to_download)
                if self.download_deferred:
                    # deferred mode: record the units now, fetch bits lazily later
                    for unit in units_to_download:
                        unit.downloaded = False
                        self.add_drpm_unit(metadata_files, unit)
                    continue
                download_wrapper = packages.Packages(
                    url, self.nectar_config, units_to_download, self.tmp_dir,
                    event_listener, self._url_modify)
                # allow the downloader to be accessed by the cancel method if necessary
                self.downloader = download_wrapper.downloader
                _logger.info(_('Downloading %(num)s DRPMs.') %
                             {'num': len(drpms_to_download)})
                download_wrapper.download_packages()
                self.downloader = None
            finally:
                presto_file_handle.close()
def download_drpms(self, metadata_files, drpms_to_download, url):
    """
    Actually download the requested DRPMs. This method iterates over the
    appropriate metadata file and downloads those items which are present in
    the corresponding set. It also checks for the DRPMs which exist in other
    repositories before downloading them. If they are already downloaded, we
    skip the download and just associate them to the given repository.

    Multiple options for deltainfo files depending on the distribution
    so we have to go through all of them to get all the DRPMs

    :param metadata_files: populated instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    :param drpms_to_download: set of DRPM.NAMEDTUPLEs
    :type  drpms_to_download: set
    :param url: current URL we should sync
    :type: str
    """
    event_listener = DRPMListener(self, metadata_files)
    for presto_file_name in presto.METADATA_FILE_NAMES:
        presto_file_handle = metadata_files.get_metadata_file_handle(
            presto_file_name)
        if presto_file_handle:
            try:
                package_model_generator = packages.package_list_generator(
                    presto_file_handle, presto.PACKAGE_TAG, presto.process_package_element)
                units_to_download = self._filtered_unit_generator(
                    package_model_generator, drpms_to_download)
                # Wrapped in a generator that adds entries to
                # the deferred (Lazy) catalog.
                units_to_download = self.catalog_generator(
                    url, units_to_download)
                if self.download_deferred:
                    # deferred mode: record the units now, fetch bits lazily later
                    for unit in units_to_download:
                        unit.downloaded = False
                        self.add_drpm_unit(metadata_files, unit)
                    continue
                download_wrapper = packages.Packages(
                    url, self.nectar_config, units_to_download, self.tmp_dir,
                    event_listener, self._url_modify)
                # allow the downloader to be accessed by the cancel method if necessary
                self.downloader = download_wrapper.downloader
                _logger.info(
                    _('Downloading %(num)s DRPMs.') % {'num': len(drpms_to_download)})
                download_wrapper.download_packages()
                self.downloader = None
            finally:
                presto_file_handle.close()
def test_fedora18_real_data(self):
    """The F18 comps data yields one category referencing the expected groups."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_COMPS_XML), group.CATEGORY_TAG, self.process_category))
    self.assertEqual(len(parsed), 1)
    category = parsed[0]
    self.assertTrue(isinstance(category, models.PackageCategory))
    group_ids = category.metadata["packagegroupids"]
    self.assertEqual(len(group_ids), 5)
    self.assertTrue("firefox" in group_ids)
    self.assertEqual(category.id, "gnome-desktop-environment")
    self.assertEqual(category.repo_id, "repo1")
def test_multiple_pkglist_multiple_collections(self):
    """
    Test that multiple pkglist and collections in erratum are imported correctly
    """
    erratum_xml = '<updates>' \
                  '  <update from="*****@*****.**" status="stable" type="security"' \
                  '  version="1">' \
                  '    <id>RHEA-2012:0055</id>' \
                  '    <title>Sea_Erratum</title>' \
                  '    <release>1</release>' \
                  '    <issued date="2012-01-27 16:08:06"/>' \
                  '    <updated date="2012-01-27 16:08:06"/>' \
                  '    <description>Sea_Erratum</description>' \
                  '    <pkglist>' \
                  '      <collection short="">' \
                  '        <name>1</name>' \
                  '        <package arch="noarch" epoch="0" name="shark" release="1"' \
                  '        src="http://www.fedoraproject.org" version="0.1">' \
                  '          <filename>shark-0.1-1.noarch.rpm</filename>' \
                  '        </package>' \
                  '      </collection>' \
                  '      <collection short="">' \
                  '        <name>2</name>' \
                  '        <package arch="noarch" epoch="0" name="walrus" release="1"' \
                  '        src="http://www.fedoraproject.org" version="5.21">' \
                  '          <filename>walrus-5.21-1.noarch.rpm</filename>' \
                  '        </package>' \
                  '      </collection>' \
                  '    </pkglist>' \
                  '    <pkglist>' \
                  '      <collection short="">' \
                  '        <name>2</name>' \
                  '        <package arch="noarch" epoch="0" name="penguin" release="1"' \
                  '        src="http://www.fedoraproject.org" version="0.9.1">' \
                  '          <filename>penguin-0.9.1-1.noarch.rpm</filename>' \
                  '        </package>' \
                  '      </collection>' \
                  '    </pkglist>' \
                  '  </update>' \
                  '</updates>'
    f = StringIO(erratum_xml)
    errata = packages.package_list_generator(f, updateinfo.PACKAGE_TAG,
                                             updateinfo.process_package_element)
    errata = list(errata)
    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    # all collections are in pkglist
    self.assertEqual(len(erratum.pkglist), 3)
    # each collection contains one package
    for collection in erratum.pkglist:
        self.assertEqual(len(collection['packages']), 1)
def test_xml_base_overrides_base_url(self):
    """An xml:base in the primary metadata overrides the repo base URL."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_XML_ALTERNATE_LOCATION), primary.PACKAGE_TAG,
        primary.process_package_element))
    self.assertEqual(len(parsed), 1)
    rpm_model = parsed[0]
    self.assertTrue(isinstance(rpm_model, models.RPM))
    # Test the location
    self.assertEqual(rpm_model.base_url, 'http://www.foo.com/repo')
def test_multiple_pkglist_multiple_collections(self):
    """
    Test that multiple pkglist and collections in erratum are imported correctly
    """
    erratum_xml = '<updates>' \
                  '  <update from="*****@*****.**" status="stable" type="security"' \
                  '  version="1">' \
                  '    <id>RHEA-2012:0055</id>' \
                  '    <title>Sea_Erratum</title>' \
                  '    <release>1</release>' \
                  '    <issued date="2012-01-27 16:08:06"/>' \
                  '    <updated date="2012-01-27 16:08:06"/>' \
                  '    <description>Sea_Erratum</description>' \
                  '    <pkglist>' \
                  '      <collection short="">' \
                  '        <name>1</name>' \
                  '        <package arch="noarch" epoch="0" name="shark" release="1"' \
                  '        src="http://www.fedoraproject.org" version="0.1">' \
                  '          <filename>shark-0.1-1.noarch.rpm</filename>' \
                  '        </package>' \
                  '      </collection>' \
                  '      <collection short="">' \
                  '        <name>2</name>' \
                  '        <package arch="noarch" epoch="0" name="walrus" release="1"' \
                  '        src="http://www.fedoraproject.org" version="5.21">' \
                  '          <filename>walrus-5.21-1.noarch.rpm</filename>' \
                  '        </package>' \
                  '      </collection>' \
                  '    </pkglist>' \
                  '    <pkglist>' \
                  '      <collection short="">' \
                  '        <name>2</name>' \
                  '        <package arch="noarch" epoch="0" name="penguin" release="1"' \
                  '        src="http://www.fedoraproject.org" version="0.9.1">' \
                  '          <filename>penguin-0.9.1-1.noarch.rpm</filename>' \
                  '        </package>' \
                  '      </collection>' \
                  '    </pkglist>' \
                  '  </update>' \
                  '</updates>'
    f = StringIO(erratum_xml)
    errata = packages.package_list_generator(
        f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element)
    errata = list(errata)
    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    # all collections are in pkglist
    self.assertEqual(len(erratum.pkglist), 3)
    # each collection contains one package
    for collection in erratum.pkglist:
        self.assertEqual(len(collection['packages']), 1)
def test_centos6_real_data(self):
    """The CentOS 6 comps data yields one category referencing 26 groups."""
    parsed = list(packages.package_list_generator(
        StringIO(CENTOS6_COMPS_XML), group.CATEGORY_TAG, self.process_category))
    self.assertEqual(len(parsed), 1)
    category = parsed[0]
    self.assertTrue(isinstance(category, models.PackageCategory))
    self.assertEqual(category.repo_id, "repo1")
    group_ids = category.metadata["packagegroupids"]
    self.assertEqual(len(group_ids), 26)
    self.assertTrue("network-tools" in group_ids)
def test_xml_base_overrides_base_url(self):
    """An xml:base in the primary metadata overrides the repo base URL."""
    rpms = packages.package_list_generator(StringIO(F18_XML_ALTERNATE_LOCATION),
                                           primary.PACKAGE_TAG,
                                           primary.process_package_element)
    rpms = list(rpms)
    self.assertEqual(len(rpms), 1)
    model = rpms[0]
    self.assertTrue(isinstance(model, models.RPM))
    # Test the location
    self.assertEqual(model.base_url, 'http://www.foo.com/repo')
def test_fedora18_real_data(self):
    """Both groups in the F18 comps data parse with the expected visibility flags."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_COMPS_XML), group.GROUP_TAG, self.process_group))
    self.assertEqual(len(parsed), 2)
    for pkg_group in parsed:
        self.assertTrue(isinstance(pkg_group, models.PackageGroup))
        self.assertEqual(pkg_group.repo_id, "repo1")
    # first group: hidden, not default; second group: visible, not default
    self.assertFalse(parsed[0].metadata["user_visible"])
    self.assertFalse(parsed[0].metadata["default"])
    self.assertTrue(parsed[1].metadata["user_visible"])
    self.assertFalse(parsed[1].metadata["default"])
def test_updateinfo_unit_metadata_with_repo(self, mock__get_repo_unit_nevra):
    """
    Render an erratum through UpdateinfoXMLFileContext with a repo conduit and
    verify that only the repo-matched package appears in the generated XML.
    """
    path = os.path.join(self.metadata_file_dir, REPO_DATA_DIR_NAME, UPDATE_INFO_XML_FILE_NAME)
    # Fix: the updateinfo.xml handle was opened but never closed (fd leak in
    # the test); a context manager guarantees it is released. The generator is
    # lazy, so the unit must be pulled before the handle closes.
    with open(os.path.join(DATA_DIR, 'updateinfo.xml'), 'r') as handle:
        generator = packages.package_list_generator(
            handle, 'update', updateinfo.process_package_element)
        erratum_unit = next(generator)

    # mock out the repo/unit nevra matcher so that only one unit in the referenced errata
    # is included in the output updateinfo XML
    mock__get_repo_unit_nevra.return_value = [
        {'name': 'patb', 'epoch': '0', 'version': '0.1', 'release': '2', 'arch': 'x86_64'},
    ]

    # just checking
    self.assertEqual(erratum_unit.unit_key['errata_id'], 'RHEA-2010:9999')

    mock_conduit = Mock()
    mock_conduit.repo_id = 'mock_conduit_repo'
    context = UpdateinfoXMLFileContext(self.metadata_file_dir, set(), conduit=mock_conduit,
                                       checksum_type='md5')
    context._open_metadata_file_handle()
    context.add_unit_metadata(erratum_unit)
    context._close_metadata_file_handle()

    self.assertNotEqual(os.path.getsize(path), 0)
    updateinfo_handle = gzip.open(path, 'r')
    try:
        content = updateinfo_handle.read()
    finally:
        updateinfo_handle.close()

    self.assertEqual(content.count('from="*****@*****.**"'), 1)
    self.assertEqual(content.count('status="final"'), 1)
    self.assertEqual(content.count('type="enhancements"'), 1)
    self.assertEqual(content.count('version="1"'), 1)
    self.assertEqual(content.count('<id>RHEA-2010:9999</id>'), 1)
    self.assertEqual(content.count('<collection short="F13PTP">'), 1)
    # only the single mocked nevra match should be emitted
    self.assertEqual(content.count('<package'), 1)
    self.assertEqual(
        content.count('<sum type="md5">f3c197a29d9b66c5b65c5d62b25db5b4</sum>'), 1)
def test_rhel6_real_data(self):
    """Parse a real RHEL 6 erratum file and verify fields via the metadata dict."""
    with open(os.path.join(DATA_DIR, 'RHBA-2010-0836.erratum.xml')) as f:
        errata = packages.package_list_generator(
            f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element)
        errata = list(errata)

    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))
    self.assertEqual(erratum.metadata.get('rights'), 'Copyright 2010 Red Hat Inc')
    # NOTE(review): the expected description is a hard-wrapped literal; the
    # wrap points below were reconstructed and should be verified against the
    # fixture file byte-for-byte.
    description = """NetworkManager is a system network service that manages network
devices and connections, attempting to keep active network connectivity when
available. It manages Ethernet, wireless, mobile broadband (WWAN), and PPPoE
devices, and provides VPN integration with a variety of different VPN services.

This update fixes the following bug:

* Under certain circumstances, the "Enable Networking" and "Enable Wireless"
menu items in the panel applet may have been insensitive. This error no longer
occurs, and both options are now available as expected. (BZ#638598)

Also, this update adds the following enhancements:

* In enterprise wireless networks, the proactive key caching can now be used
along with the PEAP-GTC authentication mechanism.

* Punjabi translation of the network applet has been updated.

Users are advised to upgrade to these updated packages, which resolve this
issue, and add these enhancements.
"""
    self.assertEqual(erratum.metadata.get('description'), description)
    self.assertTrue(erratum.metadata.get('summary') is not None)
    self.assertEqual(erratum.id, 'RHBA-2010:0836')
    self.assertEqual(erratum.metadata.get('type'), 'bugfix')
    self.assertEqual(erratum.metadata.get('issued'), '2010-11-10 00:00:00')
    self.assertEqual(erratum.metadata.get('updated'), '2010-11-10 00:00:00')
    self.assertEqual(erratum.metadata.get('reboot_suggested'), False)
    self.assertEqual(erratum.metadata.get('severity'), '')

    rpms = erratum.rpm_search_dicts
    self.assertEqual(len(rpms), 4)
    for rpm in rpms:
        # make sure all of the correct keys are present
        model = models.RPM.from_package_info(rpm)
        self.assertEqual(model.checksumtype, 'sha256')
        self.assertTrue(len(model.checksum) > 0)
        self.assertTrue(model.name.startswith('NetworkManager'))
        self.assertEqual(model.version, '0.8.1')
        self.assertEqual(model.release, '5.el6_0.1')
def test_real_data_fedora_19(self):
    """Ensure every environment in the Fedora 19 comps file parses cleanly."""
    with open(FEDORA_19_COMPS_XML) as xml_file_handle:
        environments = packages.package_list_generator(xml_file_handle,
                                                       group.ENVIRONMENT_TAG,
                                                       self.process_environment)
        total = 0
        # Loop over all to ensure parsing did not throw an error
        for environment in environments:
            self.assertIsInstance(environment, models.PackageEnvironment)
            self.assertEqual(environment.repo_id, 'repo1')
            total += 1
        self.assertEqual(12, total)
def test_adds_templates(self):
    """
    Assert that the function correctly adds templates.
    """
    parsed = list(packages.package_list_generator(
        StringIO(F18_XML), primary.PACKAGE_TAG, primary.process_package_element))

    self.assertEqual(len(parsed), 1)
    rpm = parsed[0]
    # both placeholder templates must survive into the stored raw XML
    self.assertIn(rpm.CHECKSUM_TEMPLATE, rpm.raw_xml)
    self.assertIn(rpm.CHECKSUMTYPE_TEMPLATE, rpm.raw_xml)
def test_real_data_fedora_19(self):
    """Iterate all Fedora 19 comps environments and count them."""
    with open(FEDORA_19_COMPS_XML) as xml_file_handle:
        env_iter = packages.package_list_generator(
            xml_file_handle, group.ENVIRONMENT_TAG, self.process_environment)
        seen = 0
        # Loop over all to ensure parsing did not throw an error
        for env in env_iter:
            self.assertIsInstance(env, models.PackageEnvironment)
            self.assertEqual(env.repo_id, 'repo1')
            seen += 1
        self.assertEqual(12, seen)
def download_rpms(self, metadata_files, rpms_to_download, url):
    """
    Actually download the requested RPMs. This method iterates over the appropriate
    metadata file and downloads those items which are present in the corresponding set.
    It also checks for the RPMs which exist in other repositories before downloading them.
    If they are already downloaded, we skip the download and just associate them to the
    given repository.

    :param metadata_files: populated instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    :param rpms_to_download: set of RPM.NAMEDTUPLEs
    :type  rpms_to_download: set
    :param url: current URL we should sync
    :type: str
    """
    event_listener = RPMListener(self, metadata_files)
    primary_file_handle = metadata_files.get_metadata_file_handle(primary.METADATA_FILE_NAME)
    try:
        # stream the primary metadata and keep only the models whose
        # NAMEDTUPLEs are in rpms_to_download
        package_model_generator = packages.package_list_generator(
            primary_file_handle, primary.PACKAGE_TAG, primary.process_package_element)
        units_to_download = self._filtered_unit_generator(package_model_generator,
                                                          rpms_to_download)

        # Wrapped in a generator that adds entries to
        # the deferred (Lazy) catalog.
        units_to_download = self.catalog_generator(url, units_to_download)
        if self.download_deferred:
            # deferred mode: record each unit as not-downloaded and skip the
            # actual transfer entirely
            for unit in units_to_download:
                unit.downloaded = False
                self.add_rpm_unit(metadata_files, unit)
            return

        download_wrapper = alternate.Packages(
            url, self.nectar_config, units_to_download, self.tmp_dir, event_listener,
            self._url_modify)
        # expose the downloader so it can be cancelled externally while running
        self.downloader = download_wrapper.downloader
        _logger.info(_('Downloading %(num)s RPMs.') % {'num': len(rpms_to_download)})
        download_wrapper.download_packages()
        self.downloader = None
    finally:
        primary_file_handle.close()
def test_prestodelta_unit_metadata(self):
    """
    Render a prestodelta unit through PrestodeltaXMLFileContext and verify
    the generated XML content.
    """
    path = os.path.join(self.metadata_file_dir, REPO_DATA_DIR_NAME, PRESTO_DELTA_FILE_NAME)
    # Fix: the prestodelta.xml handle was opened but never closed (fd leak in
    # the test). The generator is lazy, so pull the unit before closing.
    with open(os.path.join(DATA_DIR, 'prestodelta.xml'), 'r') as handle:
        generator = packages.package_list_generator(
            handle, 'newpackage', presto.process_package_element)
        prestodelta_unit = next(generator)

    # double check we've grabbed the right one
    self.assertEqual(prestodelta_unit.metadata['new_package'], 'yum')
    self.assertEqual(prestodelta_unit.unit_key['release'], '16.fc16')

    context = PrestodeltaXMLFileContext(self.metadata_file_dir)
    context._open_metadata_file_handle()
    context.add_unit_metadata(prestodelta_unit)
    context._close_metadata_file_handle()

    prestodelta_handle = gzip.open(path, 'r')
    try:
        content = prestodelta_handle.read()
    finally:
        prestodelta_handle.close()

    self.assertEqual(content.count('name="yum"'), 1)
    self.assertEqual(content.count('epoch="0"'), 2)  # also matches oldepoch
    self.assertEqual(content.count('version="3.4.3"'), 2)  # also matches oldversion
    self.assertEqual(content.count('release="16.fc16"'), 1)
    self.assertEqual(content.count('arch="noarch"'), 1)
    self.assertEqual(content.count('oldepoch="0"'), 1)
    self.assertEqual(content.count('oldversion="3.4.3"'), 1)
    self.assertEqual(content.count('oldrelease="11.fc16"'), 1)
    self.assertEqual(
        content.count(
            '<filename>drpms/yum-3.4.3-11.fc16_3.4.3-16.fc16.noarch.drpm</filename>'
        ), 1)
    self.assertEqual(
        content.count(
            '<sequence>yum-3.4.3-11.fc16'
            '-fa4535420dc8db63b7349d4262e3920b211141321242121222421212124242121272421212121212a121'
            '2121286272121212309f210ee210be2108e210fc210de110ae110fd110cd1108c110db110ab110fa110ca1'
            '109a110b9110a8110f710c710e6108610d510a510f4109410d310a310f2109210e11</sequence>'
        ), 1)
    self.assertEqual(content.count('<size>183029</size>'), 1)
    self.assertEqual(
        content.count(
            '<checksum '
            'type="sha256">77fad55681f652e06e8ba8fd6f11e505c4d85041ee30a37bbf8f573c4fb8f570'
            '</checksum>'), 1)
def test_rhel6_real_data(self):
    """Parse a real RHEL 6 erratum file and verify fields via model attributes."""
    with open(os.path.join(DATA_DIR, 'RHBA-2010-0836.erratum.xml')) as f:
        errata = packages.package_list_generator(f, updateinfo.PACKAGE_TAG,
                                                 updateinfo.process_package_element)
        errata = list(errata)

    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))
    self.assertEqual(erratum.rights, 'Copyright 2010 Red Hat Inc')
    # NOTE(review): the expected description is a hard-wrapped literal; the
    # wrap points below were reconstructed and should be verified against the
    # fixture file byte-for-byte.
    description = """NetworkManager is a system network service that manages network
devices and connections, attempting to keep active network connectivity when
available. It manages Ethernet, wireless, mobile broadband (WWAN), and PPPoE
devices, and provides VPN integration with a variety of different VPN services.

This update fixes the following bug:

* Under certain circumstances, the "Enable Networking" and "Enable Wireless"
menu items in the panel applet may have been insensitive. This error no longer
occurs, and both options are now available as expected. (BZ#638598)

Also, this update adds the following enhancements:

* In enterprise wireless networks, the proactive key caching can now be used
along with the PEAP-GTC authentication mechanism.

* Punjabi translation of the network applet has been updated.

Users are advised to upgrade to these updated packages, which resolve this
issue, and add these enhancements.
"""
    self.assertEqual(erratum.description, description)
    self.assertTrue(erratum.summary is not None)
    self.assertEqual(erratum.errata_id, 'RHBA-2010:0836')
    self.assertEqual(erratum.type, 'bugfix')
    self.assertEqual(erratum.issued, '2010-11-10 00:00:00')
    self.assertEqual(erratum.updated, '2010-11-10 00:00:00')
    self.assertEqual(erratum.reboot_suggested, False)
    self.assertEqual(erratum.severity, '')

    rpms = erratum.rpm_search_dicts
    self.assertEqual(len(rpms), 4)
    for rpm in rpms:
        # make sure all of the correct keys are present
        model = models.RPM(**rpm)
        self.assertEqual(model.checksumtype, 'sha256')
        self.assertTrue(len(model.checksum) > 0)
        self.assertTrue(model.name.startswith('NetworkManager'))
        self.assertEqual(model.version, '0.8.1')
        self.assertEqual(model.release, '5.el6_0.1')
def test_centos6_real_data(self):
    """Verify group names and translations parsed from the CentOS 6 comps file."""
    parsed = list(packages.package_list_generator(
        StringIO(CENTOS6_COMPS_XML), group.GROUP_TAG, self.process_group))

    self.assertEqual(len(parsed), 2)
    for pkg_group in parsed:
        self.assertIsInstance(pkg_group, models.PackageGroup)
        self.assertEqual(pkg_group.repo_id, 'repo1')
        # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
        self.assertIn(pkg_group.metadata['name'],
                      ['Afrikaans Support', 'Albanian Support'],
                      'actual name: %s' % pkg_group.metadata['name'])
        self.assertTrue(len(pkg_group.metadata['translated_name']) > 0)
def test_prestodelta_unit_metadata(self):
    """Write one prestodelta unit and check every field of the emitted XML."""
    path = os.path.join(self.metadata_file_dir, REPO_DATA_DIR_NAME, PRESTO_DELTA_FILE_NAME)
    # Fix: file handle leak — the handle was never closed. Consume the lazy
    # generator inside the context manager.
    with open(os.path.join(DATA_DIR, 'prestodelta.xml'), 'r') as handle:
        prestodelta_unit = next(packages.package_list_generator(
            handle, 'newpackage', presto.process_package_element))

    # double check we've grabbed the right one
    self.assertEqual(prestodelta_unit.metadata['new_package'], 'yum')
    self.assertEqual(prestodelta_unit.unit_key['release'], '16.fc16')

    context = PrestodeltaXMLFileContext(self.metadata_file_dir)
    context._open_metadata_file_handle()
    context.add_unit_metadata(prestodelta_unit)
    context._close_metadata_file_handle()

    prestodelta_handle = gzip.open(path, 'r')
    try:
        content = prestodelta_handle.read()
    finally:
        prestodelta_handle.close()

    expected_counts = [
        ('name="yum"', 1),
        ('epoch="0"', 2),  # also matches oldepoch
        ('version="3.4.3"', 2),  # also matches oldversion
        ('release="16.fc16"', 1),
        ('arch="noarch"', 1),
        ('oldepoch="0"', 1),
        ('oldversion="3.4.3"', 1),
        ('oldrelease="11.fc16"', 1),
        ('<filename>drpms/yum-3.4.3-11.fc16_3.4.3-16.fc16.noarch.drpm</filename>', 1),
        ('<sequence>yum-3.4.3-11.fc16'
         '-fa4535420dc8db63b7349d4262e3920b211141321242121222421212124242121272421212121212a121'
         '2121286272121212309f210ee210be2108e210fc210de110ae110fd110cd1108c110db110ab110fa110ca1'
         '109a110b9110a8110f710c710e6108610d510a510f4109410d310a310f2109210e11</sequence>', 1),
        ('<size>183029</size>', 1),
        ('<checksum '
         'type="sha256">77fad55681f652e06e8ba8fd6f11e505c4d85041ee30a37bbf8f573c4fb8f570'
         '</checksum>', 1),
    ]
    for needle, count in expected_counts:
        self.assertEqual(content.count(needle), count)
def test_centos6_real_data(self):
    """Verify group names and translations via model attributes (CentOS 6 comps)."""
    parsed = list(packages.package_list_generator(
        StringIO(CENTOS6_COMPS_XML), group.GROUP_TAG, self.process_group))

    self.assertEqual(len(parsed), 2)
    for pkg_group in parsed:
        self.assertIsInstance(pkg_group, models.PackageGroup)
        self.assertEqual(pkg_group.repo_id, 'repo1')
        # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
        self.assertIn(pkg_group.name, ['Afrikaans Support', 'Albanian Support'],
                      'actual name: %s' % pkg_group.name)
        self.assertTrue(len(pkg_group.translated_name) > 0)
def test_fedora18_real_data(self):
    """Parse the F18 source metadata and verify the single SRPM's fields."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_SOURCE_XML), primary.PACKAGE_TAG, primary.process_package_element))

    self.assertEqual(len(parsed), 1)
    srpm = parsed[0]
    self.assertIsInstance(srpm, models.SRPM)
    expected = {
        'name': 'openhpi-subagent',
        'epoch': '0',
        'version': '2.3.4',
        'release': '20.fc18',
        'arch': 'src',
        'checksum': '2d46d2c03e36583370d203e7ae63b00cfcd739421b58f8f00a89c56ac74654fa',
        'checksumtype': 'sha256',
    }
    for attribute, value in expected.items():
        self.assertEqual(getattr(srpm, attribute), value)
def test_fedora18_real_data(self):
    """Parse the F18 primary metadata and verify the single RPM's fields."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_XML), primary.PACKAGE_TAG, primary.process_package_element))

    self.assertEqual(len(parsed), 1)
    rpm = parsed[0]
    self.assertIsInstance(rpm, models.RPM)
    expected = {
        'name': 'opensm-libs',
        'epoch': '0',
        'version': '3.3.15',
        'release': '3.fc18',
        'arch': 'x86_64',
        'checksum': 'c2c85a567d1b92dd6131bd326611b162ed485f6f97583e46459b430006908d66',
        'checksumtype': 'sha256',
    }
    for attribute, value in expected.items():
        self.assertEqual(getattr(rpm, attribute), value)
def test_centos6_real_data(self):
    """Verify the CentOS 6 comps category, including translated fields (metadata dict)."""
    parsed = list(packages.package_list_generator(
        StringIO(CENTOS6_COMPS_XML), group.CATEGORY_TAG, self.process_category))

    self.assertEqual(len(parsed), 1)
    category = parsed[0]
    self.assertIsInstance(category, models.PackageCategory)
    self.assertEqual(category.repo_id, 'repo1')

    metadata = category.metadata
    self.assertEqual(len(metadata['packagegroupids']), 26)
    self.assertIn('network-tools', metadata['packagegroupids'])
    # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
    self.assertEqual(metadata['description'], 'Core system components.')
    self.assertEqual(metadata['name'], 'Base System')
    self.assertEqual(len(metadata['translated_description']), 25)
    self.assertEqual(len(metadata['translated_name']), 58)
    self.assertEqual(metadata['translated_name']['de'], 'Basissystem')
def _decide_drpms_to_download(self, metadata_files):
    """
    Decide which DRPMs should be downloaded based on the repo metadata and on the importer
    config.

    :param metadata_files: instance of MetadataFiles
    :type  metadata_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles

    :return: tuple of (set(DRPM.NAMEDTUPLEs), number of DRPMs, total size in bytes)
    :rtype:  tuple
    """
    # honor the skip-types config option: nothing to download if DRPMs are skipped
    if models.DRPM.TYPE in self.call_config.get(constants.CONFIG_SKIP, []):
        _logger.debug('skipping DRPM sync')
        return set(), 0, 0

    to_download = set()
    count = 0
    size = 0

    # multiple options for deltainfo files depending on the distribution
    # so we have to go through all of them
    for metadata_file_name in presto.METADATA_FILE_NAMES:
        presto_file_handle = metadata_files.get_metadata_file_handle(metadata_file_name)
        if presto_file_handle:
            try:
                package_info_generator = packages.package_list_generator(
                    presto_file_handle, presto.PACKAGE_TAG, presto.process_package_element)
                wanted = self._identify_wanted_versions(package_info_generator)
                # check for the units that are already in the repo
                not_found_in_the_repo = existing.check_repo(wanted.iterkeys(),
                                                            self.sync_conduit.get_units)
                # check for the units that are not in the repo, but exist on the server
                # and associate them to the repo
                # NOTE(review): to_download is reassigned (not unioned) on each
                # deltainfo file while count/size accumulate — confirm that at
                # most one deltainfo variant is ever present in a repo.
                to_download = existing.check_all_and_associate(not_found_in_the_repo,
                                                               self.sync_conduit)
                count += len(to_download)
                for unit in to_download:
                    size += wanted[unit]
            finally:
                presto_file_handle.close()
    return to_download, count, size
def test_fedora18_real_data(self):
    """Verify the single GNOME category parsed from the F18 comps file."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_COMPS_XML), group.CATEGORY_TAG, self.process_category))

    self.assertEqual(len(parsed), 1)
    category = parsed[0]
    self.assertIsInstance(category, models.PackageCategory)
    self.assertEqual(len(category.metadata['packagegroupids']), 5)
    self.assertIn('firefox', category.metadata['packagegroupids'])
    self.assertEqual(category.id, 'gnome-desktop-environment')
    self.assertEqual(category.repo_id, 'repo1')
    # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
    self.assertEqual(category.metadata['name'], 'GNOME Desktop')
    self.assertEqual(category.metadata['description'],
                     '\nGNOME is a highly intuitive and user friendly desktop environment.\n')
    self.assertEqual(len(category.metadata['translated_description']), 8)
    self.assertEqual(len(category.metadata['translated_name']), 8)
def test_scientific_linux_real_data(self):
    """Parse a Scientific Linux erratum and verify its (mostly empty) attributes."""
    with open(os.path.join(DATA_DIR, 'scientific_linux_erratum.xml')) as f:
        parsed = list(packages.package_list_generator(
            f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element))

    self.assertEqual(len(parsed), 1)
    erratum = parsed[0]
    self.assertIsInstance(erratum, models.Errata)
    expected = {
        'rights': '',
        'description': '',
        'errata_id': 'SLBA-2011:1512-2',
        'type': 'bugfix',
        'issued': '',
        'updated': '',
        'reboot_suggested': False,
        'severity': '',
    }
    for attribute, value in expected.items():
        self.assertEqual(getattr(erratum, attribute), value)
    self.assertIsNotNone(erratum.summary)
def test_centos6_real_data(self):
    """Check the CentOS 6 comps category via model attributes."""
    parsed = list(packages.package_list_generator(
        StringIO(CENTOS6_COMPS_XML), group.CATEGORY_TAG, self.process_category))

    self.assertEqual(len(parsed), 1)
    category = parsed[0]
    self.assertIsInstance(category, models.PackageCategory)
    self.assertEqual(category.repo_id, 'repo1')
    self.assertEqual(len(category.packagegroupids), 26)
    self.assertIn('network-tools', category.packagegroupids)
    # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
    self.assertEqual(category.description, 'Core system components.')
    self.assertEqual(category.name, 'Base System')
    self.assertEqual(len(category.translated_description), 25)
    self.assertEqual(len(category.translated_name), 58)
    self.assertEqual(category.translated_name['de'], 'Basissystem')
def test_updateinfo_repo_unit_nevra_q_filter(self, mock_rpm):
    """
    _repo_unit_nevra should build an OR'd mongoengine QCombination covering
    every package in the erratum parsed from the test updateinfo XML.
    """
    with open(os.path.join(DATA_DIR, 'updateinfo.xml'), 'r') as handle:
        erratum_unit = next(packages.package_list_generator(
            handle, 'update', updateinfo.process_package_element))

    context = UpdateinfoXMLFileContext(self.metadata_file_dir)
    context._repo_unit_nevra(erratum_unit, 'mock_repo')

    # The first call to mock_rpm's filter receives one positional arg: the
    # QCombination built with an OR operator and one child per package in the
    # erratum unit that was passed to the method under test.
    filter_arg = mock_rpm.objects.filter.call_args_list[0][0][0]
    self.assertIsInstance(filter_arg, QCombination)
    self.assertEqual(filter_arg.operation, filter_arg.OR)
    self.assertEqual(len(filter_arg.children), 2)
def test_fedora18_real_data(self):
    """Verify F18 groups' flags plus translations (BZ#1008010)."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_COMPS_XML), group.GROUP_TAG, self.process_group))

    self.assertEqual(len(parsed), 2)
    for pkg_group in parsed:
        self.assertIsInstance(pkg_group, models.PackageGroup)
        self.assertEqual(pkg_group.repo_id, 'repo1')

    first, second = parsed
    self.assertFalse(first.metadata['user_visible'])
    self.assertFalse(first.metadata['default'])
    self.assertTrue(second.metadata['user_visible'])
    self.assertFalse(second.metadata['default'])

    # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
    # NOTE(review): the original relied on the leaked loop variable, so only
    # the last group's name is checked; behavior preserved here.
    last = parsed[-1]
    self.assertIn(last.metadata['name'], ['base-x', 'LibreOffice'],
                  'actual name: %s' % last.metadata['name'])
    self.assertTrue(len(first.metadata['translated_description']) > 0)
    self.assertTrue(len(first.metadata['translated_name']) > 0)
def _add_packages(conduit, base_url, md_files):
    """
    Add package (rpm) entries to the catalog.

    :param conduit: Access to pulp platform API.
    :type  conduit: pulp.server.plugins.conduits.cataloger.CatalogerConduit
    :param base_url: The base download URL.
    :type  base_url: str
    :param md_files: The metadata files object.
    :type  md_files: pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    """
    handle = md_files.get_metadata_file_handle(primary.METADATA_FILE_NAME)
    try:
        # stream primary metadata and register one catalog entry per package
        parsed = packages.package_list_generator(
            handle, primary.PACKAGE_TAG, primary.process_package_element)
        for pkg in parsed:
            entry_url = urljoin(base_url, pkg.download_path)
            conduit.add_entry(pkg._content_type_id, pkg.unit_key, entry_url)
    finally:
        handle.close()
def test_link_errata_to_rpms(self):
    """_link_errata_to_rpms should query and link units for both RPMs and SRPMs."""
    # Setup
    mock_conduit = mock.MagicMock()
    mock_conduit.get_units.return_value = ['a', 'b']
    sample_errata_file = os.path.join(DATA_DIR, 'RHBA-2010-0836.erratum.xml')
    with open(sample_errata_file) as f:
        parsed = packages.package_list_generator(f, updateinfo.PACKAGE_TAG,
                                                 updateinfo.process_package_element)
        errata = list(parsed)[0]
    errata_unit = Unit(models.Errata.TYPE, errata.unit_key, errata.metadata, None)

    # Test
    upload._link_errata_to_rpms(mock_conduit, errata, errata_unit)

    # Verify
    self.assertEqual(2, mock_conduit.get_units.call_count)  # once each for RPM and SRPM
    self.assertEqual(4, mock_conduit.link_unit.call_count)  # twice each for RPM and SRPM
def test_scientific_linux_real_data(self):
    """Check the Scientific Linux erratum via the metadata dict."""
    with open(os.path.join(DATA_DIR, 'scientific_linux_erratum.xml')) as f:
        parsed = list(packages.package_list_generator(
            f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element))

    self.assertEqual(len(parsed), 1)
    erratum = parsed[0]
    self.assertIsInstance(erratum, models.Errata)

    metadata = erratum.metadata
    self.assertEqual(metadata.get('rights'), '')
    self.assertEqual(metadata.get('description'), '')
    self.assertIsNotNone(metadata.get('summary'))
    self.assertEqual(erratum.id, 'SLBA-2011:1512-2')
    self.assertEqual(metadata.get('type'), 'bugfix')
    # NOTE(review): unlike the attribute-based variant of this test, 'issued'
    # is not asserted here — confirm whether that omission is intentional.
    self.assertEqual(metadata.get('updated'), '')
    self.assertEqual(metadata.get('reboot_suggested'), False)
    self.assertEqual(metadata.get('severity'), '')
    self.assertEqual(len(erratum.rpm_search_dicts), 14)
def test_updateinfo_repo_unit_nevra_return(self, mock_rcu, mock_rpm):
    """_repo_unit_nevra should return nevra dicts only for units in the given repo."""
    # Build up the mock data as well as the expected returns
    nevra_fields = ('name', 'epoch', 'version', 'release', 'arch')
    unit1_nevra = ('n1', 'e1', 'v1', 'r1', 'a1')
    unit1_nevra_dict = dict(zip(nevra_fields, unit1_nevra))
    unit2_nevra = ('n2', 'e2', 'v2', 'r2', 'a2')
    unit2_nevra_dict = dict(zip(nevra_fields, unit2_nevra))

    # This is the result to the query for all units with a given nevra
    # The expected value is a list of tuples containing unit ids and nevra fields;
    mock_rpm.objects.filter().scalar.return_value = [
        ('id1', ) + unit1_nevra,
        ('id2', ) + unit2_nevra,
    ]

    # The expected value here is a list of unit IDs from the previous query that are
    # associated with our mock repo.
    mock_rcu.objects.filter().scalar.return_value = ['id1']

    # Load the updateinfo XML to get an erratum unit to process
    with open(os.path.join(DATA_DIR, 'updateinfo.xml'), 'r') as handle:
        generator = packages.package_list_generator(
            handle, 'update', updateinfo.process_package_element)
        erratum_unit = next(generator)

    context = UpdateinfoXMLFileContext(self.metadata_file_dir)
    repo_unit_nevra = context._repo_unit_nevra(erratum_unit, 'mock_repo')

    # Call 0 created the scalar mock, so we're interested in call 1. In this case, check
    # that filter was called at least once with the expected filter kwargs and values.
    mock_rcu.objects.filter.assert_any_call(unit_id__in=['id2', 'id1'], repo_id='mock_repo')

    # And finally, make sure the return value is actually good!
    # We made the RPM mock simulate two units known to pulp with the nevra seen in our errata.
    # Then, we made the RepositoryContentUnit mock simulate that only one of those units is
    # associated with the passed-in repo. The return value should be a list with only the
    # single matching unit's nevra dict in it.
    self.assertEqual(len(repo_unit_nevra), 1)
    self.assertTrue(unit1_nevra_dict in repo_unit_nevra)
    self.assertTrue(unit2_nevra_dict not in repo_unit_nevra)
def generate_dbs(self):
    """
    For repo data files that contain data we need to access later for each
    unit in the repo, generate a local db file that gives us quick read
    access to each unit's data.

    :raises PulpCodedException: if there is some inconsistency in metadata
    """
    package_count = {}
    # build one db per metadata file type (filelists.xml and other.xml)
    for filename, tag, process_func in (
        (filelists.METADATA_FILE_NAME, filelists.PACKAGE_TAG,
         filelists.process_package_element),
        (other.METADATA_FILE_NAME, other.PACKAGE_TAG, other.process_package_element),
    ):
        with contextlib.closing(self.get_metadata_file_handle(
                filename)) as xml_file_handle:
            generator = package_list_generator(xml_file_handle, tag)
            db_filename = os.path.join(self.dst_dir, '%s.db' % filename)
            # always a New file, and open with Fast writing mode.
            with contextlib.closing(gdbm.open(db_filename, 'nf')) as db_file_handle:
                for element in generator:
                    utils.strip_ns(element)
                    # pkgid is overwritten with a template placeholder;
                    # presumably the real checksum is substituted in at
                    # publish time — verify against PKGID_TEMPLATE usage
                    element.attrib['pkgid'] = models.RpmBase.PKGID_TEMPLATE
                    raw_xml = utils.element_to_raw_xml(element)
                    unit_key, _ = process_func(element)
                    # db key is derived from the unit key so the snippet can
                    # be looked up per unit later
                    db_key = self.generate_db_key(unit_key)
                    db_file_handle[db_key] = raw_xml
                    db_file_handle.sync()
                package_count[filename] = len(db_file_handle)
        self.dbs[filename] = db_filename
    # filelists.xml and other.xml must describe the same set of packages
    if package_count[filelists.METADATA_FILE_NAME] != package_count[
            other.METADATA_FILE_NAME]:
        reason = ('metadata is specified for different set of packages in filelists.xml'
                  ' and in other.xml')
        raise PulpCodedException(error_code=error_codes.RPM1015, reason=reason)
    self.rpm_count = package_count[filelists.METADATA_FILE_NAME]
def test_rhel6_real_data(self):
    """Parse a real RHEL 6 erratum and verify top-level attributes only."""
    with open(os.path.join(DATA_DIR, 'RHBA-2010-0836.erratum.xml')) as f:
        errata = packages.package_list_generator(
            f, updateinfo.PACKAGE_TAG, updateinfo.process_package_element)
        errata = list(errata)

    self.assertEqual(len(errata), 1)
    erratum = errata[0]
    self.assertTrue(isinstance(erratum, models.Errata))
    self.assertEqual(erratum.rights, 'Copyright 2010 Red Hat Inc')
    # NOTE(review): the expected description is a hard-wrapped literal; the
    # wrap points below were reconstructed and should be verified against the
    # fixture file byte-for-byte.
    description = """NetworkManager is a system network service that manages network
devices and connections, attempting to keep active network connectivity when
available. It manages Ethernet, wireless, mobile broadband (WWAN), and PPPoE
devices, and provides VPN integration with a variety of different VPN services.

This update fixes the following bug:

* Under certain circumstances, the "Enable Networking" and "Enable Wireless"
menu items in the panel applet may have been insensitive. This error no longer
occurs, and both options are now available as expected. (BZ#638598)

Also, this update adds the following enhancements:

* In enterprise wireless networks, the proactive key caching can now be used
along with the PEAP-GTC authentication mechanism.

* Punjabi translation of the network applet has been updated.

Users are advised to upgrade to these updated packages, which resolve this
issue, and add these enhancements.
"""
    self.assertEqual(erratum.description, description)
    self.assertTrue(erratum.summary is not None)
    self.assertEqual(erratum.errata_id, 'RHBA-2010:0836')
    self.assertEqual(erratum.type, 'bugfix')
    self.assertEqual(erratum.issued, '2010-11-10 00:00:00')
    self.assertEqual(erratum.updated, '2010-11-10 00:00:00')
    self.assertEqual(erratum.reboot_suggested, False)
    self.assertEqual(erratum.severity, '')
def test_fedora18_real_data(self):
    """Parse the F18 comps file and verify its GNOME category, translations included."""
    parsed = list(packages.package_list_generator(
        StringIO(F18_COMPS_XML), group.CATEGORY_TAG, self.process_category))

    self.assertEqual(len(parsed), 1)
    category = parsed[0]
    self.assertIsInstance(category, models.PackageCategory)

    group_ids = category.metadata['packagegroupids']
    self.assertEqual(len(group_ids), 5)
    self.assertIn('firefox', group_ids)
    self.assertEqual(category.id, 'gnome-desktop-environment')
    self.assertEqual(category.repo_id, 'repo1')
    # tests for fix to https://bugzilla.redhat.com/show_bug.cgi?id=1008010
    self.assertEqual(category.metadata['name'], 'GNOME Desktop')
    self.assertEqual(
        category.metadata['description'],
        '\nGNOME is a highly intuitive and user friendly desktop environment.\n'
    )
    self.assertEqual(len(category.metadata['translated_description']), 8)
    self.assertEqual(len(category.metadata['translated_name']), 8)