def test_resolve_deps(self):
    """Resolving deps for pulp-rpm-server against two known units yields
    exactly one resolved and zero unresolved dependencies."""
    repo = mock.Mock(spec=Repository)
    repo.working_dir = "/tmp/test_resolve_deps"
    repo.id = "test_resolve_deps"
    # pulp-server: the unit we expect dependency resolution to find
    key_server = {
        'id': '', 'name': 'pulp-server', 'version': '0.0.309',
        'release': '1.fc17', 'epoch': '0', 'arch': 'noarch',
        'checksumtype': 'sha256',
        'checksum': 'ee5afa0aaf8bd2130b7f4a9b35f4178336c72e95358dd33bda8acaa5f28ea6e9',
        'type_id': 'rpm',
    }
    rpm_server = Unit(RPM_TYPE_ID, key_server, {}, '')
    rpm_server.metadata = constants.PULP_SERVER_RPM_METADATA
    # pulp-rpm-server: the unit whose dependencies get resolved
    key_rpm_server = {
        'id': '', 'name': 'pulp-rpm-server', 'version': '0.0.309',
        'release': '1.fc17', 'epoch': '0', 'arch': 'noarch',
        'checksumtype': 'sha256',
        'checksum': '1e6c3a3bae26423fe49d26930b986e5f5ee25523c13f875dfcd4bf80f770bf56',
        'type_id': 'rpm',
    }
    rpm_rpm_server = Unit(RPM_TYPE_ID, key_rpm_server, {}, '')
    rpm_rpm_server.metadata = constants.PULP_RPM_SERVER_RPM_METADATA
    existing_units = [rpm_server, rpm_rpm_server]
    conduit = importer_mocks.get_dependency_conduit(
        type_id=RPM_TYPE_ID, existing_units=existing_units, pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    units = [Unit(RPM_TYPE_ID, key_rpm_server, {}, '')]
    result = importer.resolve_dependencies(repo, units, conduit, config)
    # 'resolved'/'unresolved' map unit -> list; flatten before counting
    self.assertEqual(len(list(itertools.chain(*result['resolved'].values()))), 1)
    self.assertEqual(len(list(itertools.chain(*result['unresolved'].values()))), 0)
def test_upload_rpm(self):
    """Upload a single RPM; verify status, summary bookkeeping, and that
    repodata (primary/other/filelists) snippets were generated.

    Fix: replaced the deprecated ``dict.has_key()`` call with the ``in``
    operator (``has_key`` was removed in Python 3).
    """
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_upload_rpm"
    upload_conduit = importer_mocks.get_upload_conduit(pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    rpm_name = "incisura-7.1.4-1.elfake.noarch.rpm"
    file_path = "%s/%s" % (self.data_dir, rpm_name)
    mdata = {'filename': rpm_name,
             'checksum': 'e0e98e76e4e06dad65a82b0111651d7aca5b00fe'}
    unit_key = {'name': 'incisura', 'version': '7.1.4', 'release': '1',
                'arch': 'noarch',
                'checksum': 'e0e98e76e4e06dad65a82b0111651d7aca5b00fe',
                'checksumtype': 'sha1'}
    type_id = "rpm"
    status, summary, details = importer._upload_unit(
        repo, type_id, unit_key, mdata, file_path, upload_conduit, config)
    self.assertTrue(status)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    # validate if metadata was generated during the upload
    self.assertTrue('repodata' in mdata)
    self.assertTrue(mdata['repodata']['primary'] is not None)
    self.assertTrue(mdata['repodata']['other'] is not None)
    self.assertTrue(mdata['repodata']['filelists'] is not None)
    primary_snippet = mdata['repodata']['primary']
    # verify the location in the primary snippet matches the package basename;
    # expected_href has the same length as the slice the original compared
    expected_href = "href=\"%s\"" % rpm_name
    location_start_index = primary_snippet.find("href=")
    self.assertTrue(
        primary_snippet[location_start_index:location_start_index + len(expected_href)]
        == expected_href)
    self.assertEquals(summary["num_units_saved"], 1)
    self.assertEquals(summary["num_units_processed"], 1)
    self.assertEquals(summary["filename"], rpm_name)
    self.assertEquals(details["errors"], [])
def test_repo_scratchpad_settings(self):
    """After a sync, the repo scratchpad must record the checksum type and
    the repodata (including prestodelta) gathered from the feed.

    Fixes: deprecated ``dict.has_key()`` replaced with ``in``; the
    Python-2-only ``print`` statement converted to the single-argument
    form that behaves identically under Python 2 and 3.
    """
    global repo_scratchpad
    repo_scratchpad = {}
    def set_repo_scratchpad(data):
        global repo_scratchpad
        repo_scratchpad = data
    def get_repo_scratchpad():
        global repo_scratchpad
        return repo_scratchpad
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/test_drpm_repo/"
    importer = YumImporter()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_repo_scratchpad"
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
    # Route the conduit's scratchpad accessors through the module-global dict
    sync_conduit.set_repo_scratchpad = mock.Mock()
    sync_conduit.set_repo_scratchpad.side_effect = set_repo_scratchpad
    sync_conduit.get_repo_scratchpad = mock.Mock()
    sync_conduit.get_repo_scratchpad.side_effect = get_repo_scratchpad
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importer._sync_repo(repo, sync_conduit, config)
    print("SCRATCHPAD %s" % repo_scratchpad)
    self.assertEquals(repo_scratchpad['checksum_type'], 'sha256')
    self.assertTrue("repodata" in repo_scratchpad)
    self.assertTrue("prestodelta" in repo_scratchpad["repodata"])
def test_skip_packagegroups(self):
    """Syncing with skip=['packagegroup'] must mark the comps step SKIPPED."""
    global updated_progress
    updated_progress = None
    def set_progress(progress):
        global updated_progress
        updated_progress = progress
    yi = YumImporter()
    skip = ["packagegroup"]
    repo_src_dir = os.path.join(self.data_dir, "pulp_unittest")
    feed_url = "file://%s" % (repo_src_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url, skip_content_types=skip)
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_skip_packagegroup"
    # Simulate a repo sync, copy the source contents to the repo.working_dir
    self.simulate_sync(repo, repo_src_dir)
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
    sync_conduit.set_progress = mock.Mock()
    # BUGFIX: the original re-assigned 'sync_conduit.set_progress = set_progress',
    # discarding the Mock (and its call tracking) it had just installed.  Wire the
    # callback through side_effect instead, consistent with the other sync tests.
    sync_conduit.set_progress.side_effect = set_progress
    status, summary, details = yi._sync_repo(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEqual(updated_progress["comps"]["state"], "SKIPPED")
def test_progress_sync(self):
    """Full sync of the demo repo; verify the progress callback populated the
    metadata, errata, and content sections with the expected fields.

    Fix: replaced deprecated ``dict.has_key()`` calls with the ``in``
    operator (removed in Python 3).
    """
    global updated_progress
    updated_progress = None
    def set_progress(progress):
        global updated_progress
        updated_progress = progress
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    importer = YumImporter()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_progress_sync"
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
    sync_conduit.set_progress = mock.Mock()
    sync_conduit.set_progress.side_effect = set_progress
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    status, summary, details = importer._sync_repo(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["packages"]["num_synced_new_rpms"], 3)
    self.assertTrue(updated_progress is not None)
    self.assertTrue("metadata" in updated_progress)
    self.assertTrue("state" in updated_progress["metadata"])
    self.assertTrue("errata" in updated_progress)
    self.assertTrue("state" in updated_progress["errata"])
    self.assertTrue("content" in updated_progress)
    self.assertTrue("state" in updated_progress["content"])
    self.assertEquals(updated_progress["content"]["state"], "FINISHED")
    # every standard progress field must be present in the content section
    for key in importer_rpm.PROGRESS_REPORT_FIELDS:
        self.assertTrue(key in updated_progress["content"])
def test_cancel_sync(self):
    """Start a deliberately slow sync on a background thread, cancel it while
    package download is in progress, and verify the content step ends
    CANCELED with a falsy sync status.

    Fix: replaced deprecated ``dict.has_key()`` calls in the polling
    condition with the ``in`` operator.
    """
    global updated_progress
    updated_progress = None
    def set_progress(progress):
        global updated_progress
        updated_progress = progress
    class SyncThread(threading.Thread):
        """Runs _sync_repo and captures its result so the main thread can poll."""
        def __init__(self, importer, repo, sync_conduit, config):
            threading.Thread.__init__(self)
            self.importer = importer
            self.repo = repo
            self.sync_conduit = sync_conduit
            self.config = config
            self.status = None
            self.summary = None
            self.details = None
            self.finished = False
        def run(self):
            status, summary, details = self.importer._sync_repo(self.repo, self.sync_conduit, self.config)
            self.status = status
            self.summary = summary
            self.details = details
            self.finished = True
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/v1/testing/6Server/x86_64/"
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_cancel_sync"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    sync_conduit.set_progress = mock.Mock()
    sync_conduit.set_progress.side_effect = set_progress
    config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=1, max_speed=25)
    importer = YumImporter()
    sync_thread = SyncThread(importer, repo, sync_conduit, config)
    sync_thread.start()
    # Wait to confirm that sync has started and we are downloading packages
    # We are intentionally setting the 'config' to use 1 thread and max_speed to be low so we will
    # have a chance to cancel the sync before it completes
    for i in range(30):
        if updated_progress and "content" in updated_progress \
                and "state" in updated_progress["content"] \
                and updated_progress["content"]["state"] == "IN_PROGRESS":
            break
        time.sleep(1)
    self.assertEquals(updated_progress["metadata"]["state"], "FINISHED")
    self.assertEquals(updated_progress["content"]["state"], "IN_PROGRESS")
    ###
    ### Issue Cancel
    ###
    importer.cancel_sync_repo(None, None)
    # Wait for cancel of sync
    for i in range(45):
        if sync_thread.finished:
            break
        time.sleep(1)
    self.assertEquals(updated_progress["content"]["state"], "CANCELED")
    self.assertFalse(sync_thread.status)
def test_feedless_repo_sync(self):
    """A sync attempt on a repo with no configured feed must fail cleanly
    and report a descriptive error in the summary."""
    importer = YumImporter()
    config = importer_mocks.get_basic_config()  # note: no feed_url supplied
    repo = mock.Mock(spec=Repository)
    repo.id = "test_feedless_repo_sync"
    repo.working_dir = self.working_dir
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, pkg_dir=self.pkg_dir)
    sync_conduit.set_progress = mock.Mock()
    status, summary, details = importer._sync_repo(repo, sync_conduit, config)
    self.assertFalse(status)
    self.assertEquals(summary['error'],
                      "Cannot perform repository sync on a repository with no feed")
def test_distribution_unit_import(self):
    """Import (copy) a distribution unit from a source to a target repo and
    verify exactly that unit was associated.

    Fix: the Python-2-only ``print`` statement is converted to the
    single-argument function form, which prints identically on 2 and 3.
    """
    existing_units = []
    dunit_key = {}
    dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
    dunit_key['version'] = "16"
    dunit_key['arch'] = "x86_64"
    dunit_key['family'] = "TestFamily"
    dunit_key['variant'] = "TestVariant"
    # Distribution metadata: the tree files that belong to the kickstart tree
    metadata = {"files": [
        {"checksumtype": "sha256", "relativepath": "images/fileA.txt",
         "fileName": "fileA.txt",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.data_dir,
         "checksum": "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
         "filename": "fileA.txt",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
        {"checksumtype": "sha256", "relativepath": "images/fileB.txt",
         "fileName": "fileB.txt",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.data_dir,
         "checksum": "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
         "filename": "fileB.txt",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
        {"checksumtype": "sha256", "relativepath": "images/fileC.iso",
         "fileName": "fileC.iso",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.data_dir,
         "checksum": "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
         "filename": "fileC.iso",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
    ]}
    distro_unit = [Unit(TYPE_ID_DISTRO, dunit_key, metadata, '')]
    distro_unit[0].storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir
    existing_units += distro_unit
    # REPO A (source)
    repoA = mock.Mock(spec=Repository)
    repoA.working_dir = self.data_dir
    repoA.id = "test_distro_unit_copy"
    # REPO B (target)
    repoB = mock.Mock(spec=Repository)
    repoB.working_dir = self.working_dir
    repoB.id = "repoB"
    conduit = importer_mocks.get_import_conduit([distro_unit], existing_units=existing_units)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    # Test
    result = importer.import_units(repoA, repoB, conduit, config, distro_unit)
    # Verify
    print(conduit.associate_unit.call_args_list)
    associated_units = [mock_call[0][0] for mock_call in conduit.associate_unit.call_args_list]
    self.assertEqual(len(associated_units), len(distro_unit))
    for u in associated_units:
        self.assertTrue(u in distro_unit)
def test_upload_package_group(self):
    """Uploading a package category unit completes with state FINISHED."""
    repo = mock.Mock(spec=Repository)
    repo.id = "test_upload_package_group"
    repo.working_dir = self.working_dir
    upload_conduit = importer_mocks.get_upload_conduit(pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    type_id = TYPE_ID_PKG_CATEGORY
    unit = self.get_pkg_group_or_category(repo, type_id)
    # Group/category uploads carry no payload file, hence file_path=None
    file_path = None
    status, summary, details = importer._upload_unit(
        repo, type_id, unit.unit_key, unit.metadata, file_path, upload_conduit, config)
    self.assertTrue(status)
    self.assertEqual(summary['state'], 'FINISHED')
def test_validate_config(self):
    """validate_config accepts a known-good config and rejects an unknown
    option, naming the offending key in the returned message."""
    importer = YumImporter()
    repo = mock.Mock(spec=Repository)
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    good_config = importer_mocks.get_basic_config(feed_url=feed_url)
    state, msg = importer.validate_config(repo, good_config, [])
    self.assertTrue(state)
    # Test that an unknown argument in the config throws an error
    # and the unknown arg is identified in the message
    bad_config = importer_mocks.get_basic_config(feed_url=feed_url, bad_unknown_arg="blah")
    state, msg = importer.validate_config(repo, bad_config, [])
    self.assertFalse(state)
    self.assertTrue("bad_unknown_arg" in msg)
def test_upload_erratum(self):
    """Uploading an erratum unit completes with state FINISHED."""
    repo = mock.Mock(spec=Repository)
    repo.id = "test_upload_errata"
    repo.working_dir = self.working_dir
    upload_conduit = importer_mocks.get_upload_conduit(pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    type_id = "erratum"
    unit_key = {'id': "RHBA-2012:0101"}
    metadata = {"pkglist": []}
    # Errata carry no payload file; the original passed an empty list here
    file_path = []
    status, summary, details = importer._upload_unit(
        repo, type_id, unit_key, metadata, file_path, upload_conduit, config)
    self.assertTrue(status)
    self.assertEqual(summary['state'], 'FINISHED')
def setUp(self):
    """Create a scratch temp dir, a mock Repository with a real on-disk
    working dir, and the YumImporter under test."""
    super(TestValidateConfig, self).setUp()
    self.temp_dir = tempfile.mkdtemp()
    self.repo = mock.Mock(spec=Repository)
    # working_dir must actually exist: validate_config writes cert files into it
    # (see test_config_ssl_ca_cert / test_config_ssl_client_cert elsewhere in this file)
    self.repo.working_dir = os.path.join(self.temp_dir, "repo_working_dir")
    os.makedirs(self.repo.working_dir)
    self.importer = YumImporter()
    self.init()  # presumably sets self.data_dir — defined elsewhere in this class
def setup_source_repo(self):
    """Sync a sample repository to populate a source repo, then build the
    simulated source units an import conduit would expose.

    Returns (importer, source_repo, source_units, import_conduit, config).
    """
    # Sync a sample repository to populate and setup up Source Repo
    source_repo = mock.Mock(spec=Repository)
    source_repo.id = "repo_a"
    source_repo.working_dir = os.path.join(self.working_dir, source_repo.id)
    importer = YumImporter()
    feed_url = "file://%s/pulp_unittest/" % (self.data_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    status, summary, details = importer._sync_repo(source_repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["packages"]["num_synced_new_rpms"], 3)
    # Confirm regular RPM files exist under self.pkg_dir
    pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
    self.assertEquals(len(pkgs), 3)
    for p in pkgs:
        self.assertTrue(os.path.isfile(p))
    # Confirm symlinks to RPMs exist under repo.working_dir
    sym_links = self.get_files_in_dir("*.rpm", source_repo.working_dir)
    self.assertEquals(len(pkgs), 3)
    for link in sym_links:
        self.assertTrue(os.path.islink(link))
    #
    # Now we have some test data in the source repo
    #
    # Simulate what import_conduit.get_source_repos would return
    #
    # NOTE(review): all three units below share the same (empty) metadata
    # dict object — fine here since nothing mutates it, but worth knowing.
    metadata = {}
    source_units = []
    storage_path = '%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {"filename":filename}
    source_units.append(Unit(TYPE_ID_RPM, unit_key, metadata, storage_path))
    storage_path = '%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {"filename":filename}
    source_units.append(Unit(TYPE_ID_RPM, unit_key, metadata, storage_path))
    storage_path = '%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {"filename":filename}
    source_units.append(Unit(TYPE_ID_RPM, unit_key, metadata, storage_path))
    # Pass in the simulated source_units to the import_conduit
    import_conduit = importer_mocks.get_import_conduit(source_units=source_units)
    return importer, source_repo, source_units, import_conduit, config
def test_upload_rpm(self):
    """Upload one RPM and check the summary bookkeeping (counts, filename,
    empty error list)."""
    rpm_name = "incisura-7.1.4-1.elfake.noarch.rpm"
    rpm_checksum = 'e0e98e76e4e06dad65a82b0111651d7aca5b00fe'
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_upload_rpm"
    upload_conduit = importer_mocks.get_upload_conduit(pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    file_path = "%s/%s" % (self.data_dir, rpm_name)
    metadata = {'filename': rpm_name, 'checksum': rpm_checksum}
    unit_key = {'name': 'incisura', 'version': '7.1.4', 'release': '1',
                'arch': 'noarch', 'checksum': rpm_checksum,
                'checksumtype': 'sha1'}
    type_id = "rpm"
    status, summary, details = importer._upload_unit(
        repo, type_id, unit_key, metadata, file_path, upload_conduit, config)
    self.assertTrue(status)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertEquals(summary["num_units_saved"], 1)
    self.assertEquals(summary["num_units_processed"], 1)
    self.assertEquals(summary["filename"], rpm_name)
    self.assertEquals(details["errors"], [])
def test_local_sync_with_bad_url(self):
    """A bad local feed URL makes the low-level ImporterRPM.sync raise, while
    the top-level YumImporter.sync_repo must absorb the failure instead of
    propagating it.

    Fix: narrowed the two bare ``except:`` clauses to ``except Exception:``;
    a bare except also swallows SystemExit/KeyboardInterrupt, which would
    mask a test-runner interrupt as a "caught" sync failure.
    """
    feed_url = "file:///INTENTIONAL_BAD_URL/demo_repos/pulp_unittest/"
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_local_sync_with_bad_url"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    caught_exception = False
    try:
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    except Exception:
        caught_exception = True
    self.assertTrue(caught_exception)
    importer = YumImporter()
    caught_exception = False
    try:
        report = importer.sync_repo(repo, sync_conduit, config)
    except Exception:
        caught_exception = True
    self.assertFalse(caught_exception)
def test_import(self):
    """Importing a single RPM unit associates exactly that unit."""
    # Setup
    existing_units = self.existing_units()
    # Source repository
    src_repo = mock.Mock(spec=Repository)
    src_repo.working_dir = self.data_dir
    src_repo.id = "test_resolve_deps"
    # Target repository
    dst_repo = mock.Mock(spec=Repository)
    dst_repo.working_dir = self.working_dir
    dst_repo.id = "repoB"
    units = [Unit(TYPE_ID_RPM, self.UNIT_KEY_B, {}, '')]
    conduit = importer_mocks.get_import_conduit(units, existing_units=existing_units)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    # Test
    result = importer.import_units(src_repo, dst_repo, conduit, config, units)
    # Verify: every associated unit is one of the requested units
    associated = [call[0][0] for call in conduit.associate_unit.call_args_list]
    self.assertEqual(len(associated), len(units))
    for unit in associated:
        self.assertTrue(unit in units)
def test_package_category_unit_import(self):
    """Copying a package category also pulls in the group it references."""
    # Source repository
    src_repo = mock.Mock(spec=Repository)
    src_repo.working_dir = self.data_dir
    src_repo.id = "test_pkg_cat_unit_copy"
    # Target repository
    dst_repo = mock.Mock(spec=Repository)
    dst_repo.working_dir = self.working_dir
    dst_repo.id = "repoB"
    # One group, and one category that references it
    grp_a = self.create_dummy_pkg_group_unit(src_repo.id, "group_a")
    cat_a = self.create_dummy_pkg_category_unit(src_repo.id, "cat_a", ["group_a"])
    conduit = importer_mocks.get_import_conduit([cat_a], existing_units=[grp_a, cat_a])
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    # Test
    result = importer.import_units(src_repo, dst_repo, conduit, config, [cat_a])
    # Verify: everything associated is either the category or its group
    associated = [call[0][0] for call in conduit.associate_unit.call_args_list]
    for unit in associated:
        self.assertTrue(unit in [cat_a, grp_a])
def test_import_with_dependencies(self):
    """With recursive dependency resolution enabled, importing one unit
    associates all of its dependencies as well."""
    # Setup
    existing_units = self.existing_units()
    # Source repository
    src_repo = mock.Mock(spec=Repository)
    src_repo.working_dir = "/tmp/test_resolve_deps"
    src_repo.id = "test_resolve_deps"
    # Target repository
    dst_repo = mock.Mock(spec=Repository)
    dst_repo.working_dir = "/tmp/test_resolve_deps"
    dst_repo.id = "repo_b"
    units = [Unit(TYPE_ID_RPM, self.UNIT_KEY_B, {}, '')]
    conduit = importer_mocks.get_import_conduit(units, existing_units=existing_units)
    config = importer_mocks.get_basic_config()
    # Turn on recursive dependency resolution for this import
    config.override_config['recursive'] = True
    config.override_config['resolve_dependencies'] = True
    importer = YumImporter()
    # Test
    result = importer.import_units(src_repo, dst_repo, conduit, config, units)
    # Verify: dependencies were associated, not just the requested unit
    associated = [call[0][0] for call in conduit.associate_unit.call_args_list]
    self.assertEqual(len(associated), len(existing_units))
    for unit in associated:
        self.assertTrue(unit in existing_units + units)
class TestValidateConfig(rpm_support_base.PulpRPMTests):
    """Exercises YumImporter.validate_config across every supported option.

    Each test_config_* method feeds one invalid value (expects state False)
    and one valid value (expects state True) for a single config key.
    """

    def setUp(self):
        """Create a temp dir, a mock Repository with a real working dir, and the importer."""
        super(TestValidateConfig, self).setUp()
        self.temp_dir = tempfile.mkdtemp()
        self.repo = mock.Mock(spec=Repository)
        # working_dir must exist on disk: the SSL cert tests below expect
        # validate_config to write cert files into it
        self.repo.working_dir = os.path.join(self.temp_dir, "repo_working_dir")
        os.makedirs(self.repo.working_dir)
        self.importer = YumImporter()
        self.init()

    def tearDown(self):
        """Remove the temp dir created in setUp."""
        super(TestValidateConfig, self).tearDown()
        shutil.rmtree(self.temp_dir)

    def init(self):
        # Test fixtures live in ../data relative to this test module
        self.data_dir = os.path.abspath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "../data"))

    def test_config_feed_url(self):
        """An unsupported URL scheme fails; http passes."""
        # test bad feed_url
        feed_url = "fake://example.redhat.com/"
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        feed_url = "http://example.redhat.com/"
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_ssl_verify(self):
        """ssl_verify must be a boolean, not an arbitrary string."""
        feed_url = "http://example.redhat.com/"
        ssl_verify = "fake"
        config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_verify=ssl_verify)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        ssl_verify = True
        config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_verify=ssl_verify)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_ssl_ca_cert(self):
        """ssl_ca_cert must be valid PEM data; a valid cert is written into
        the repo working dir."""
        if not M2CRYPTO_HAS_CRL_SUPPORT:
            return
        feed_url = "http://example.redhat.com/"
        ssl_ca_cert = "fake_path_to_ca"
        config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_ca_cert=ssl_ca_cert)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        ssl_ca_cert = open(os.path.join(self.data_dir, "valid_ca.crt")).read()
        config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_ca_cert=ssl_ca_cert)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)
        # validate_config persists the cert under the repo working dir
        ssl_ca_cert_filename = os.path.join(self.repo.working_dir, "ssl_ca_cert")
        self.assertTrue(os.path.exists(ssl_ca_cert_filename))
        ca_cert_data = open(ssl_ca_cert_filename).read()
        self.assertEqual(ca_cert_data, ssl_ca_cert)

    def test_config_ssl_client_cert(self):
        """ssl_client_cert must be valid PEM data; a valid cert is written into
        the repo working dir."""
        if not M2CRYPTO_HAS_CRL_SUPPORT:
            return
        feed_url = "http://example.redhat.com/"
        ssl_client_cert = "fake_path_to_client_cert"
        config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_client_cert=ssl_client_cert)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        ssl_client_cert = open(os.path.join(self.data_dir, "cert.crt")).read()
        config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_client_cert=ssl_client_cert)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)
        # validate_config persists the cert under the repo working dir
        ssl_client_cert_filename = os.path.join(self.repo.working_dir, "ssl_client_cert")
        self.assertTrue(os.path.exists(ssl_client_cert_filename))
        client_cert_data = open(ssl_client_cert_filename).read()
        self.assertEqual(client_cert_data, ssl_client_cert)

    def test_config_proxy_url(self):
        """proxy_url must use a supported scheme."""
        feed_url = "http://example.redhat.com/"
        proxy_url = "fake://proxy"
        config = importer_mocks.get_basic_config(feed_url=feed_url, proxy_url=proxy_url)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        proxy_url = "http://proxy"
        config = importer_mocks.get_basic_config(feed_url=feed_url, proxy_url=proxy_url)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_proxy_port(self):
        """proxy_port must be an integer."""
        config = importer_mocks.get_basic_config(proxy_port=100)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)
        config = importer_mocks.get_basic_config(proxy_port='port')
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)

    def test_config_max_speed(self):
        """max_speed must be numeric."""
        feed_url = "http://example.redhat.com/"
        max_speed = "fake_speed"
        config = importer_mocks.get_basic_config(feed_url=feed_url, max_speed=max_speed)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        max_speed = 100
        config = importer_mocks.get_basic_config(feed_url=feed_url, max_speed=max_speed)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_verify_checksum(self):
        """verify_checksum must be a boolean."""
        feed_url = "http://example.redhat.com/"
        verify_checksum = "fake_bool"
        config = importer_mocks.get_basic_config(feed_url=feed_url, verify_checksum=verify_checksum)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        verify_checksum = True
        config = importer_mocks.get_basic_config(feed_url=feed_url, verify_checksum=verify_checksum)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_verify_size(self):
        """verify_size must be a boolean."""
        feed_url = "http://example.redhat.com/"
        verify_size = "fake_bool"
        config = importer_mocks.get_basic_config(feed_url=feed_url, verify_size=verify_size)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        verify_size = True
        config = importer_mocks.get_basic_config(feed_url=feed_url, verify_size=verify_size)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_newest(self):
        """newest must be a boolean."""
        feed_url = "http://example.redhat.com/"
        newest = "fake_bool"
        config = importer_mocks.get_basic_config(feed_url=feed_url, newest=newest)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        newest = True
        config = importer_mocks.get_basic_config(feed_url=feed_url, newest=newest)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_remove_old(self):
        """remove_old must be a boolean."""
        feed_url = "http://example.redhat.com/"
        remove_old = "fake_bool"
        config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=remove_old)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        remove_old = True
        config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=remove_old)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_num_threads(self):
        """num_threads must be an integer."""
        feed_url = "http://example.redhat.com/"
        num_threads = "fake_int"
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=num_threads)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        num_threads = 5
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=num_threads)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_num_old_packages(self):
        """num_old_packages must be an integer."""
        feed_url = "http://example.redhat.com/"
        num_old_packages = "fake_int"
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_old_packages=num_old_packages)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        num_old_packages = 4
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_old_packages=num_old_packages)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_purge_orphaned(self):
        """purge_orphaned must be a boolean."""
        feed_url = "http://example.redhat.com/"
        purge_orphaned = "fake_bool"
        config = importer_mocks.get_basic_config(feed_url=feed_url, purge_orphaned=purge_orphaned)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        purge_orphaned = True
        config = importer_mocks.get_basic_config(feed_url=feed_url, purge_orphaned=purge_orphaned)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_checksum_type(self):
        """checksum_type must name a supported checksum algorithm."""
        feed_url = "http://example.redhat.com/"
        checksum_type = "fake_checksum"
        config = importer_mocks.get_basic_config(feed_url=feed_url, checksum_type=checksum_type)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        checksum_type = "sha"
        config = importer_mocks.get_basic_config(feed_url=feed_url, checksum_type=checksum_type)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)

    def test_config_skip(self):
        """skip must be a list of content types, not a string."""
        feed_url = "http://example.redhat.com/"
        skip_content_types = ""
        config = importer_mocks.get_basic_config(feed_url=feed_url, skip=skip_content_types)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertFalse(state)
        skip_content_types = []
        config = importer_mocks.get_basic_config(feed_url=feed_url, skip=skip_content_types)
        state, msg = self.importer.validate_config(self.repo, config, [])
        self.assertTrue(state)
def test_package_group_unit_import(self):
    """Import a package group and verify its mandatory/default/optional/
    conditional member RPMs are associated along with it, while the older
    of two versions of the same package is skipped."""
    # REPO A (source)
    repoA = mock.Mock(spec=Repository)
    repoA.working_dir = self.data_dir
    repoA.id = "test_pkg_grp_unit_copy"
    # REPO B (target)
    repoB = mock.Mock(spec=Repository)
    repoB.working_dir = self.working_dir
    repoB.id = "repoB"
    # Create 2 pkg groups
    grp_a = self.create_dummy_pkg_group_unit(repoA.id, "group_a")
    verify_units = [grp_a]
    source_units = []
    # conditional member: pulp-dot-1.0-test
    # NOTE(review): the directory says pulp-dot-1.0-test but the rpm filename
    # in the path is pulp-dot-2.0-test-0.1.2 — looks copy-pasted; the unit_key
    # ('pulp-dot-1.0-test' 0.1.1) is what the group matching actually uses.
    storage_path = '%s/pulp-dot-1.0-test/0.1.1/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {
        'name': 'pulp-dot-1.0-test',
        'version': '0.1.1',
        'release': '1.fc11',
        'epoch': '0',
        'arch': 'x86_64',
        'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
        'checksumtype': 'sha256',
    }
    metadata = {'filename': filename}
    u = Unit(TYPE_ID_RPM, unit_key, metadata, storage_path)
    source_units.append(u)
    verify_units.append(u)
    # default member: pulp-dot-2.0-test
    storage_path = '%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {
        'name': 'pulp-dot-2.0-test',
        'version': '0.1.2',
        'release': '1.fc11',
        'epoch': '0',
        'arch': 'x86_64',
        'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
        'checksumtype': 'sha256',
    }
    metadata = {'filename': filename}
    u = Unit(TYPE_ID_RPM, unit_key, metadata, storage_path)
    source_units.append(u)
    verify_units.append(u)
    # mandatory member, OLD version 0.3.1 — expected to be skipped in favor
    # of 0.3.2 below
    storage_path = '%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {
        'name': 'pulp-test-package',
        'version': '0.3.1',
        'release': '1.fc11',
        'epoch': '0',
        'arch': 'x86_64',
        'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
        'checksumtype': 'sha256',
    }
    metadata = {'filename': filename}
    u = Unit(TYPE_ID_RPM, unit_key, metadata, storage_path)
    source_units.append(u)
    verify_old_version_skipped = [u]
    # mandatory member, NEW version 0.3.2 — the one that should win
    storage_path = '%s/pulp-test-package/0.3.2/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {
        'name': 'pulp-test-package',
        'version': '0.3.2',
        'release': '1.fc11',
        'epoch': '0',
        'arch': 'x86_64',
        'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
        'checksumtype': 'sha256',
    }
    metadata = {'filename': filename}
    u = Unit(TYPE_ID_RPM, unit_key, metadata, storage_path)
    source_units.append(u)
    verify_units.append(u)
    # optional member: pulp-optional-package
    storage_path = '%s/pulp-test-optional-package/0.3.2/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm' % (self.pkg_dir)
    filename = os.path.basename(storage_path)
    unit_key = {
        'name': 'pulp-optional-package',
        'version': '0.1.1',
        'release': '1.fc11',
        'epoch': '0',
        'arch': 'x86_64',
        'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
        'checksumtype': 'sha256',
    }
    metadata = {'filename': filename}
    u = Unit(TYPE_ID_RPM, unit_key, metadata, storage_path)
    source_units.append(u)
    verify_units.append(u)
    # Wire the group's member lists to the packages built above
    grp_a.metadata['mandatory_package_names'] = ["pulp-test-package",]
    grp_a.metadata['default_package_names'] = ["pulp-dot-2.0-test"]
    grp_a.metadata['optional_package_names'] = ["pulp-optional-package"]
    grp_a.metadata['conditional_package_names'] = [('pulp-dot-1.0-test', [])]
    existing_units = [grp_a]
    conduit = importer_mocks.get_import_conduit(source_units + [grp_a], existing_units=source_units + existing_units)
    config = importer_mocks.get_basic_config()
    importer = YumImporter()
    # Test
    result = importer.import_units(repoA, repoB, conduit, config, [grp_a])
    # Verify
    associated_units = [mock_call[0][0] for mock_call in conduit.associate_unit.call_args_list]
    # verify expected units are in associate units
    for u in verify_units:
        self.assertTrue(u in associated_units)
    # verify that the version compare worked and skipped old versions
    for u in verify_old_version_skipped:
        self.assertFalse(u in associated_units)
def test_errors_with_local_sync(self):
    """
    Sync a local feed that contains one deliberately unreadable RPM and
    verify the sync fails, the bad package is counted as an error, and the
    progress callback reports correct per-type counts and error details.
    """
    if os.getuid() == 0:
        # Root ignores file permission bits, so the chmod-0000 trick below
        # would not produce a read error; the scenario cannot be tested.
        return
    global updated_progress
    updated_progress = None

    def set_progress(progress):
        # Capture the most recent progress report published by the importer.
        global updated_progress
        updated_progress = progress

    importer = YumImporter()
    feed_url = "file://%s/local_errors/" % (self.data_dir)
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_errors_with_local_sync"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    sync_conduit.set_progress = mock.Mock()
    sync_conduit.set_progress.side_effect = set_progress
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    test_rpm_with_error = os.path.join(self.data_dir, "local_errors",
                                       "pulp-test-package-0.3.1-1.fc11.x86_64.rpm")
    orig_stat = os.stat(test_rpm_with_error)
    try:
        # Strip all permissions from one RPM to force a download error.
        os.chmod(test_rpm_with_error, 0000)
        self.assertFalse(os.access(test_rpm_with_error, os.R_OK))
        status, summary, details = importer._sync_repo(repo, sync_conduit, config)
    finally:
        # Always restore permissions so other tests can read the fixture.
        os.chmod(test_rpm_with_error, orig_stat.st_mode)
    self.assertFalse(status)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertEquals(summary["packages"]["num_not_synced_rpms"], 1)
    self.assertEquals(details["packages"]["size_total"], 6791)
    # Confirm regular RPM files exist under self.pkg_dir
    pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
    self.assertEquals(len(pkgs), 2)
    sym_links = self.get_files_in_dir("*.rpm", repo.working_dir)
    # BUGFIX: this previously re-asserted len(pkgs), so the symlink count in
    # the repo working dir was never actually checked.
    self.assertEquals(len(sym_links), 2)
    self.assertTrue("metadata" in updated_progress)
    self.assertEqual(updated_progress["metadata"]["state"], "FINISHED")
    self.assertTrue("errata" in updated_progress)
    self.assertEqual(updated_progress["errata"]["state"], "FINISHED")
    self.assertEqual(updated_progress["errata"]["num_errata"], 52)
    self.assertTrue("content" in updated_progress)
    self.assertEqual(updated_progress["content"]["state"], "FINISHED")
    self.assertEqual(updated_progress["content"]["items_total"], 3)
    self.assertEqual(updated_progress["content"]["items_left"], 0)
    self.assertEqual(updated_progress["content"]["num_success"], 2)
    self.assertEqual(updated_progress["content"]["num_error"], 1)
    self.assertEqual(updated_progress["content"]["size_total"], 6791)
    self.assertEqual(updated_progress["content"]["size_left"], 0)
    # Non-RPM content types saw no traffic; all their counters must be zero.
    for type_id in (BaseFetch.FILE, BaseFetch.TREE_FILE, BaseFetch.DELTA_RPM):
        self.assertTrue(type_id in updated_progress["content"]["details"])
        self.assertEqual(updated_progress["content"]["details"][type_id]["num_success"], 0)
        self.assertEqual(updated_progress["content"]["details"][type_id]["num_error"], 0)
        self.assertEqual(updated_progress["content"]["details"][type_id]["size_total"], 0)
        self.assertEqual(updated_progress["content"]["details"][type_id]["size_left"], 0)
        self.assertEqual(updated_progress["content"]["details"][type_id]["items_total"], 0)
        self.assertEqual(updated_progress["content"]["details"][type_id]["items_left"], 0)
    # Expected rpm detail: 2 successes, 1 error, 3 total, nothing left.
    self.assertTrue("rpm" in updated_progress["content"]["details"])
    self.assertEqual(updated_progress["content"]["details"]["rpm"]["num_success"], 2)
    self.assertEqual(updated_progress["content"]["details"]["rpm"]["num_error"], 1)
    self.assertEqual(updated_progress["content"]["details"]["rpm"]["size_total"], 6791)
    self.assertEqual(updated_progress["content"]["details"]["rpm"]["size_left"], 0)
    self.assertEqual(updated_progress["content"]["details"]["rpm"]["items_total"], 3)
    self.assertEqual(updated_progress["content"]["details"]["rpm"]["items_left"], 0)
    #
    # Check error_details
    # error has keys of: {"error_type", "traceback", "value", "exception"}
    #
    self.assertEqual(len(updated_progress["content"]["error_details"]), 1)
    error = updated_progress["content"]["error_details"][0]
    self.assertEqual(error["filename"], "pulp-test-package-0.3.1-1.fc11.x86_64.rpm")
    self.assertEqual(error["value"], '(37, "Couldn\'t open file %s")' % (test_rpm_with_error))
    self.assertTrue('pycurl.error' in error["error_type"])
    self.assertTrue(isinstance(error["exception"], basestring))
    self.assertTrue(len(error["traceback"]) > 0)
def setup_source_repo(self):
    """
    Sync a sample repository to populate a source repo, then return the
    importer, source repo, simulated source units, an import conduit primed
    with those units, and the config used for the sync.
    """
    source_repo = mock.Mock(spec=Repository)
    source_repo.id = "repo_a"
    source_repo.working_dir = os.path.join(self.working_dir, source_repo.id)
    importer = YumImporter()
    feed_url = "file://%s/pulp_unittest/" % (self.data_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    status, summary, details = importer._sync_repo(source_repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["packages"]["num_synced_new_rpms"], 3)
    # The source repo now holds test data. Build the Unit objects that
    # import_conduit.get_source_units() would hand back for it:
    # (name, version, checksum, storage_path) per synced RPM.
    pkg_specs = [
        ('pulp-dot-2.0-test', '0.1.2',
         '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
         '%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm' % (self.pkg_dir)),
        ('pulp-test-package', '0.3.1',
         '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
         '%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm' % (self.pkg_dir)),
        ('pulp-test-package', '0.2.1',
         '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
         '%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm' % (self.pkg_dir)),
    ]
    source_units = []
    for pkg_name, pkg_version, pkg_checksum, storage_path in pkg_specs:
        unit_key = {
            'name': pkg_name,
            'version': pkg_version,
            'release': '1.fc11',
            'epoch': '0',
            'arch': 'x86_64',
            'checksum': pkg_checksum,
            'checksumtype': 'sha256',
        }
        metadata = {'filename': os.path.basename(storage_path)}
        source_units.append(Unit(TYPE_ID_RPM, unit_key, metadata, storage_path))
    # Pass in the simulated source_units to the import_conduit
    import_conduit = importer_mocks.get_import_conduit(source_units=source_units,
                                                       existing_units=source_units)
    return importer, source_repo, source_units, import_conduit, config
def test_metadata(self):
    """YumImporter.metadata() advertises the importer id and RPM support."""
    importer_md = YumImporter.metadata()
    self.assertEquals(TYPE_ID_IMPORTER_YUM, importer_md["id"])
    self.assertTrue(TYPE_ID_RPM in importer_md["types"])
def test_errata_import_units(self):
    """
    Import two errata units from a source repo into a target repo and
    verify that each requested unit is associated with the target.
    """
    def build_pkglist():
        # Build a fresh pkglist each call so the two errata units never
        # share mutable state.
        return [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                 'packages': [{'arch': 'x86_64', 'epoch': '0',
                               'filename': 'patb-0.1-2.x86_64.rpm',
                               'name': 'patb', 'release': '2', 'src': '',
                               'sum': ('sha', '017c12050a97cf6095892498750c2a39d2bf535e'),
                               'version': '0.1'},
                              {'arch': 'x86_64', 'epoch': '0',
                               'filename': 'emoticons-0.1-2.x86_64.rpm',
                               'name': 'emoticons', 'release': '2', 'src': '',
                               'sum': ('sha', '663c89b0d29bfd5479d8736b716d50eed9495dbb'),
                               'version': '0.1'}],
                 'short': 'rhel-i386-server-vt-5'}]

    def build_metadata(stamp):
        # The two test errata differ only in their issued/updated timestamps.
        return {'description': "test",
                'from_str': '*****@*****.**',
                'issued': stamp,
                'pkglist': build_pkglist(),
                'pushcount': 1,
                'reboot_suggested': False,
                'references': [],
                'release': '',
                'rights': '',
                'status': 'final',
                'summary': '',
                'title': 'emoticons enhancement fix and enhancement update',
                'updated': stamp,
                'version': '1',
                'type': 'enhancement',
                'severity': 'Low',
                'solution': ''}

    errata_unit = [
        Unit(TYPE_ID_ERRATA, {'id': "RHEA-2010:9999"},
             build_metadata('2010-03-30 08:07:30'), ''),
        Unit(TYPE_ID_ERRATA, {'id': "RHEA-2008:9999"},
             build_metadata('2008-03-30 00:00:00'), ''),
    ]
    existing_units = list(errata_unit)
    # REPO A (source)
    repoA = mock.Mock(spec=Repository)
    repoA.working_dir = self.data_dir
    repoA.id = "test_errata_unit_copy"
    # REPO B (target)
    repoB = mock.Mock(spec=Repository)
    repoB.working_dir = self.working_dir
    repoB.id = "repoB"
    conduit = importer_mocks.get_import_conduit(errata_unit, existing_units=existing_units)
    config = importer_mocks.get_basic_config(blacklist=['patb'])
    importer = YumImporter()
    # Test
    result = importer.import_units(repoA, repoB, conduit, config, errata_unit)
    # Verify: every requested erratum was associated into the target repo.
    associated_units = [mock_call[0][0]
                        for mock_call in conduit.associate_unit.call_args_list]
    self.assertEqual(len(associated_units), len(errata_unit))
    for u in associated_units:
        self.assertTrue(u in errata_unit)