def test_get_available_with_no_group_data_present(self):
    """get_available returns empty group/category dicts when no comps data exists.

    Covers two cases: a None repo directory and a real directory that
    contains no comps metadata.
    """
    # No directory at all: both results must be empty dicts, not None.
    groups, categories = comps.get_available(None)
    # Use assertEqual consistently; assertEquals is a deprecated alias.
    self.assertEqual(groups, {})
    self.assertEqual(categories, {})
    # A repo directory that lacks comps metadata also yields empty results.
    bad_data = os.path.join(self.data_dir, "simple_repo_no_comps")
    groups, categories = comps.get_available(bad_data)
    self.assertEqual(groups, {})
    self.assertEqual(categories, {})
def test_sync_of_orphaned_data(self):
    """Verify a re-sync removes groups/categories no longer present in the source.

    Scenario:
      1. Sync a repo with initial comps data (3 groups, 2 categories).
      2. Point the feed at a modified source directory.
      3. Re-sync with the previously created units passed as existing units.
      4. Verify the summary reports the orphaned groups/categories.
    """
    ic = ImporterComps()
    repo_src_dir = os.path.join(self.data_dir, "test_orphaned_data_initial")
    feed_url = "file://%s" % (repo_src_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    repo = mock.Mock(spec=Repository)
    repo.id = "test_sync_of_orphaned_data"
    repo.working_dir = self.working_dir
    # Simulate a repo sync, copy the source contents to the repo.working_dir
    self.simulate_sync(repo, repo_src_dir)
    sync_conduit = importer_mocks.get_sync_conduit()
    status, summary, details = ic.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    # Initial sync: everything is new, nothing is orphaned.
    self.assertEqual(summary["num_available_groups"], 3)
    self.assertEqual(summary["num_available_categories"], 2)
    self.assertEqual(summary["num_new_groups"], 3)
    self.assertEqual(summary["num_new_categories"], 2)
    self.assertEqual(summary["num_orphaned_groups"], 0)
    self.assertEqual(summary["num_orphaned_categories"], 0)
    self.assertTrue(summary["time_total_sec"] > 0)
    #
    # Simulate the existing_units
    #
    avail_groups, avail_cats = comps.get_available(repo_src_dir)
    existing_cats, existing_cat_units = comps.get_new_category_units(
        avail_cats, {}, sync_conduit, repo)
    existing_groups, existing_group_units = comps.get_new_group_units(
        avail_groups, {}, sync_conduit, repo)
    # assertEqual used throughout; assertEquals is a deprecated alias.
    self.assertEqual(len(existing_cats), 2)
    self.assertEqual(len(existing_groups), 3)
    existing_units = []
    existing_units.extend(existing_group_units.values())
    existing_units.extend(existing_cat_units.values())
    self.assertEqual(len(existing_units),
                     (len(existing_cats) + len(existing_groups)))
    #
    # Now we will simulate a change to the feed and pass in our existing units
    #
    repo_src_dir = os.path.join(self.data_dir, "test_orphaned_data_final")
    feed_url = "file://%s" % (repo_src_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units)
    self.simulate_sync(repo, repo_src_dir)
    status, summary, details = ic.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    # Final source has fewer groups/categories: one of each is orphaned.
    self.assertEqual(summary["num_available_groups"], 2)
    self.assertEqual(summary["num_available_categories"], 1)
    self.assertEqual(summary["num_new_groups"], 0)
    self.assertEqual(summary["num_new_categories"], 0)
    self.assertEqual(summary["num_orphaned_groups"], 1)
    self.assertEqual(summary["num_orphaned_categories"], 1)
    self.assertTrue(summary["time_total_sec"] > 0)
def _assert_required_metadata_keys(self, groups, categories):
    """Helper: every group/category dict carries all required metadata keys."""
    for g in groups.values():
        keys = g.keys()
        for key_name in METADATA_PKG_GROUP:
            self.assertTrue(key_name in keys)
    for c in categories.values():
        keys = c.keys()
        for key_name in METADATA_PKG_CATEGORY:
            self.assertTrue(key_name in keys)

def test_get_available(self):
    """Verify comps.get_available parses a valid comps.xml.

    Exercises three calls against the same repo: an explicit "group"
    md_type, unknown md_types (expects empty results), and the default
    md_types.
    """
    # The repo directory is invariant across all three calls; compute once.
    repo_dir = os.path.join(self.data_dir, "pulp_unittest")
    self.assertTrue(os.path.exists(repo_dir))
    # Explicit "group" metadata type.
    groups, categories = comps.get_available(repo_dir, md_types=["group"])
    self.assertEqual(len(groups), 3)
    self.assertEqual(len(categories), 2)
    self._assert_required_metadata_keys(groups, categories)
    # Unknown metadata types yield no groups/categories.
    groups, categories = comps.get_available(repo_dir, md_types=["foo", "bar"])
    self.assertEqual(len(groups), 0)
    self.assertEqual(len(categories), 0)
    self._assert_required_metadata_keys(groups, categories)
    # Default md_types behaves like the explicit "group" case.
    groups, categories = comps.get_available(repo_dir)
    self.assertEqual(len(groups), 3)
    self.assertEqual(len(categories), 2)
    self._assert_required_metadata_keys(groups, categories)
def get_pkg_group_or_category(self, repo, type_id):
    """Return one freshly-created group or category unit for *repo*.

    The unit is built from the "test_comps_import_with_dots_in_pkg_names"
    fixture data. Returns None for an unrecognized type_id.
    """
    src_dir = os.path.join(self.data_dir,
                           "test_comps_import_with_dots_in_pkg_names")
    conduit = importer_mocks.get_sync_conduit()
    avail_groups, avail_cats = comps.get_available(src_dir)
    if type_id == TYPE_ID_PKG_GROUP:
        _, units = comps.get_new_group_units(avail_groups, {}, conduit, repo)
    elif type_id == TYPE_ID_PKG_CATEGORY:
        _, units = comps.get_new_category_units(avail_cats, {}, conduit, repo)
    else:
        return None
    self.assertTrue(len(units) > 0)
    # Python 2: dict.values() is a list, so direct indexing works.
    return units.values()[0]
def test_form_comps_xml(self):
    """Round-trip test: units -> comps.xml -> units yields identical data.

    Builds group/category units from fixture data, writes them out with
    form_comps_xml_from_units, re-parses the file, and verifies the
    re-read units match the originals key-for-key.
    """
    # Form several package groups and categories
    repo_src_dir = os.path.join(self.data_dir, "pulp_unittest")
    avail_groups, avail_cats = comps.get_available(repo_src_dir)
    # Translate the dicts into units
    repo = mock.Mock(spec=Repository)
    repo.id = "test_form_comps_xml"
    repo.working_dir = self.working_dir
    sync_conduit = importer_mocks.get_sync_conduit()
    initial_cats, initial_cat_units = comps.get_new_category_units(
        avail_cats, {}, sync_conduit, repo)
    initial_groups, initial_group_units = comps.get_new_group_units(
        avail_groups, {}, sync_conduit, repo)
    # Write these to a comps.xml
    comps_xml = comps_util.form_comps_xml_from_units(
        initial_group_units.values(), initial_cat_units.values())
    out_path = os.path.join(self.temp_dir, "test_form_comps.xml")
    # Context manager replaces the manual open/try/finally/close dance.
    with open(out_path, "w") as f:
        f.write(comps_xml)
    # Read in comps.xml and parse
    final_groups, final_cats = comps.get_available(
        repo_src_dir, group_file=out_path, group_type='group')
    final_cats, final_cat_units = comps.get_new_category_units(
        final_cats, {}, sync_conduit, repo)
    final_groups, final_group_units = comps.get_new_group_units(
        final_groups, {}, sync_conduit, repo)
    # Verify we get the same data back (assertEqual: assertEquals is deprecated)
    self.assertEqual(len(initial_group_units), len(final_group_units))
    self.assertEqual(len(initial_cat_units), len(final_cat_units))
    # Examine Package Group Data
    for grp_key in initial_group_units:
        initial_unit = initial_group_units[grp_key]
        final_unit = final_group_units[grp_key]
        self.assertEqual(len(initial_unit.unit_key), len(final_unit.unit_key))
        self.assertEqual(len(initial_unit.metadata), len(final_unit.metadata))
        # Verify unit keys are same
        for key in initial_unit.unit_key:
            self.assertEqual(initial_unit.unit_key[key], final_unit.unit_key[key])
        for key in initial_unit.metadata:
            self.assertEqual(initial_unit.metadata[key], final_unit.metadata[key])
def test_comps_imports_with_xz_compression(self):
    """Verify get_available parses an xz-compressed group.xml.xz.

    The fixture repo carries 202 groups and 10 categories; each parsed
    dict must contain all required metadata keys.
    """
    # Test with a valid xz group.xml.xz
    repo_dir = os.path.join(self.data_dir, "test_comps_with_xz_compress")
    self.assertTrue(os.path.exists(repo_dir))
    groups, categories = comps.get_available(repo_dir, md_types=["group"])
    # (Removed leftover debug print of the group/category counts.)
    self.assertEqual(len(groups), 202)
    self.assertEqual(len(categories), 10)
    for g in groups.values():
        keys = g.keys()
        for key_name in METADATA_PKG_GROUP:
            self.assertTrue(key_name in keys)
    for c in categories.values():
        keys = c.keys()
        for key_name in METADATA_PKG_CATEGORY:
            self.assertTrue(key_name in keys)
def test_write_comps_with_centos6_comps_xml(self):
    """Verify write_comps_xml handles problematic CentOS 6 comps data.

    The written comps.xml must be re-parseable by yum and contain the
    same number of groups/categories that were written.
    """
    repo = mock.Mock(spec=Repository)
    repo.id = "test_write_comps_with_i18n_data"
    repo.working_dir = self.working_dir
    sync_conduit = importer_mocks.get_sync_conduit()
    repo_src_dir = os.path.join(self.data_dir,
                                "test_comps_import_with_dots_in_pkg_names")
    # Simulate a sync with CentOS 6 comps.xml data
    # The test data contains issues such as:
    # 1) conditional_package_names that contain a '.' in the key name
    #    InvalidDocument: key 'openoffice.org-langpack-en' must not contain '.'
    # 2) unicode strings which are not being encoded correctly during write
    #    UnicodeEncodeError: 'ascii' codec can't encode characters in
    #    position 334-341: ordinal not in range(128)
    avail_groups, avail_cats = comps.get_available(repo_src_dir)
    groups, group_units = comps.get_new_group_units(
        avail_groups, {}, sync_conduit, repo)
    cats, cat_units = comps.get_new_category_units(
        avail_cats, {}, sync_conduit, repo)
    # NOTE(review): removed unused local `yum_distributor = YumDistributor()`;
    # nothing in this test referenced it.
    comps_xml_out_path = comps_util.write_comps_xml(
        repo, group_units.values(), cat_units.values())
    self.assertEqual(comps_xml_out_path,
                     os.path.join(repo.working_dir, "comps.xml"))
    yc = yum.comps.Comps()
    yc.add(comps_xml_out_path)
    # Bug fix: the original used assertTrue(a, b), which treats `b` as the
    # failure message and passes whenever `a` is truthy. assertEqual is the
    # comparison that was intended.
    self.assertEqual(len(group_units), len(yc.groups))
    self.assertEqual(len(cat_units), len(yc.categories))
def test_comps_import_with_dots_in_pkg_names(self):
    """Verify problematic package groups/categories can be saved to mongo.

    Imported from a CentOS 6 comps.xml containing entries like:
    http://mirror.centos.org/centos/6/os/x86_64/repodata/3a27232698a261aa4022fd270797a3006aa8b8a346cbd6a31fae1466c724d098-c6-x86_64-comps.xml
    <packagereq requires="openoffice.org-core" type="conditional">openoffice.org-langpack-en</packagereq>
    which previously raised:
    InvalidDocument: key 'openoffice.org-langpack-en' must not contain '.'
    """
    db = connection.database()
    dummy_collection_name = "unit_test_dummy_data"
    dummy_collection = getattr(db, dummy_collection_name)
    saved_ok = False
    try:
        src_dir = os.path.join(self.data_dir,
                               "test_comps_import_with_dots_in_pkg_names")
        avail_groups, avail_cats = comps.get_available(src_dir)
        # Python 2: dict.values() returns lists, so concatenation is valid.
        for item in avail_groups.values() + avail_cats.values():
            dummy_collection.save(item, safe=True)
        saved_ok = True
    finally:
        # Always clean up the scratch collection, even on failure.
        db.drop_collection(dummy_collection_name)
    self.assertTrue(saved_ok)