def test_remove_old_packages(self):
    feed_url = "http://jmatthews.fedorapeople.org/repo_multiple_versions/"
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_remove_old_packages"
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, pkg_dir=self.pkg_dir)
    ###
    # Test that old packages are not in rpmList and are never intended to be downloaded.
    # Additionally, verify that already existing packages which are NOT orphaned are also
    # removed by the remove_old functionality.
    ###
    config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=False, num_old_packages=0)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["num_synced_new_rpms"], 12)
    pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
    self.assertEquals(len(pkgs), 12)
    yumRepoGrinder = importer_rpm.get_yumRepoGrinder(repo.id, repo.working_dir, config)
    yumRepoGrinder.setup(basepath=repo.working_dir)
    rpm_items = yumRepoGrinder.getRPMItems()
    yumRepoGrinder.stop()
    del yumRepoGrinder
    self.assertEquals(len(rpm_items), 12)
    existing_units = []
    for rpm in rpm_items:
        u = Unit(TYPE_ID_RPM,
                 importer_rpm.form_rpm_unit_key(rpm),
                 importer_rpm.form_rpm_metadata(rpm),
                 os.path.join(self.pkg_dir, rpm["pkgpath"], rpm["filename"]))
        existing_units.append(u)
    # With remove_old=True and num_old_packages=6, the newest version plus 6 older
    # ones survive: 7 RPMs remain and the other 5 are orphaned
    config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=True, num_old_packages=6)
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["num_rpms"], 7)
    self.assertEquals(summary["num_orphaned_rpms"], 5)
    self.assertEquals(summary["num_synced_new_rpms"], 0)
    self.assertEquals(summary["num_not_synced_rpms"], 0)
    pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
    self.assertEquals(len(pkgs), 7)
    # With num_old_packages=0, only the newest version survives and 11 are orphaned
    config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=True, num_old_packages=0)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["num_rpms"], 1)
    self.assertEquals(summary["num_orphaned_rpms"], 11)
    self.assertEquals(summary["num_synced_new_rpms"], 0)
    self.assertEquals(summary["num_not_synced_rpms"], 0)
    pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
    self.assertEquals(len(pkgs), 1)

def test_config_proxy_port(self):
    config = importer_mocks.get_basic_config(proxy_port=100)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

    config = importer_mocks.get_basic_config(proxy_port='port')
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

def test_errata_sync_with_repos_that_share_upstream_url(self):
    # This test is for https://bugzilla.redhat.com/show_bug.cgi?id=870495
    feed_url = "http://example.com/test_repo/"

    # Set up repo_1 and sync it
    importer_1 = YumImporter()
    repo_1 = mock.Mock(spec=Repository)
    repo_1.working_dir = self.working_dir
    repo_1.id = "test_repo_1"
    sync_conduit_1 = importer_mocks.get_sync_conduit()
    config_1 = importer_mocks.get_basic_config(feed_url=feed_url)
    self.simulate_sync(repo_1, self.repo_dir)
    importer_errata_1 = errata.ImporterErrata()
    status_1, summary_1, details_1 = importer_errata_1.sync(repo_1, sync_conduit_1, config_1)
    self.assertTrue(status_1)
    self.assertTrue(summary_1 is not None)
    self.assertTrue(details_1 is not None)
    self.assertEquals(summary_1["num_new_errata"], 52)
    self.assertEquals(summary_1["num_existing_errata"], 0)
    self.assertEquals(summary_1["num_orphaned_errata"], 0)
    self.assertEquals(details_1["num_bugfix_errata"], 36)
    self.assertEquals(details_1["num_security_errata"], 7)
    self.assertEquals(details_1["num_enhancement_errata"], 9)
    # We should have called save_unit() once for each erratum, in sync().
    self.assertEqual(len(sync_conduit_1.save_unit.mock_calls), 52)

    # Now let's set up another repo with the same URL, and then sync. We should get the same
    # errata.
    importer_2 = YumImporter()
    repo_2 = mock.Mock(spec=Repository)
    working_dir_2 = os.path.join(self.temp_dir, "working_2")
    os.makedirs(working_dir_2)
    repo_2.working_dir = working_dir_2
    repo_2.id = "test_repo_2"
    unit_key = {'id': "RHBA-2007:0112"}
    metadata = {'updated': "2007-03-14 00:00:00",
                'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)'}]}
    existing_units = [Unit(TYPE_ID_ERRATA, unit_key, metadata, '')]
    existing_units[0].updated = metadata['updated']
    sync_conduit_2 = importer_mocks.get_sync_conduit(existing_units=existing_units)
    config_2 = importer_mocks.get_basic_config(feed_url=feed_url)
    self.simulate_sync(repo_2, self.repo_dir)
    importer_errata_2 = errata.ImporterErrata()
    status_2, summary_2, details_2 = importer_errata_2.sync(repo_2, sync_conduit_2, config_2)
    self.assertTrue(status_2)
    self.assertTrue(summary_2 is not None)
    self.assertTrue(details_2 is not None)
    self.assertEquals(summary_2["num_new_errata"], 51)
    self.assertEquals(summary_2["num_existing_errata"], 1)
    self.assertEquals(summary_2["num_orphaned_errata"], 0)
    self.assertEquals(details_2["num_bugfix_errata"], 35)
    self.assertEquals(details_2["num_security_errata"], 7)
    self.assertEquals(details_2["num_enhancement_errata"], 9)
    # There should be the same number of calls to save_unit() as there are errata,
    # because sync() calls it once for each of the 51 new errata, and get_new_errata_units()
    # also calls it once for the one erratum that already existed.
    self.assertEqual(len(sync_conduit_2.save_unit.mock_calls), 52)

def test_sync_of_orphaned_data(self):
    # Sync repo with some initial data
    # Modify the underlying directory to make it look like source has changed
    # Re-sync
    # Verify orphaned groups/categories were removed
    ic = ImporterComps()
    repo_src_dir = os.path.join(self.data_dir, "test_orphaned_data_initial")
    feed_url = "file://%s" % (repo_src_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    repo = mock.Mock(spec=Repository)
    repo.id = "test_sync_of_orphaned_data"
    repo.working_dir = self.working_dir
    # Simulate a repo sync, copy the source contents to the repo.working_dir
    self.simulate_sync(repo, repo_src_dir)
    sync_conduit = importer_mocks.get_sync_conduit()
    status, summary, details = ic.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEqual(summary["num_available_groups"], 3)
    self.assertEqual(summary["num_available_categories"], 2)
    self.assertEqual(summary["num_new_groups"], 3)
    self.assertEqual(summary["num_new_categories"], 2)
    self.assertEqual(summary["num_orphaned_groups"], 0)
    self.assertEqual(summary["num_orphaned_categories"], 0)
    self.assertTrue(summary["time_total_sec"] > 0)
    #
    # Simulate the existing_units
    #
    avail_groups, avail_cats = comps.get_available(repo_src_dir)
    existing_cats, existing_cat_units = comps.get_new_category_units(avail_cats, {}, sync_conduit, repo)
    existing_groups, existing_group_units = comps.get_new_group_units(avail_groups, {}, sync_conduit, repo)
    self.assertEquals(len(existing_cats), 2)
    self.assertEquals(len(existing_groups), 3)
    existing_units = []
    existing_units.extend(existing_group_units.values())
    existing_units.extend(existing_cat_units.values())
    self.assertEquals(len(existing_units), (len(existing_cats) + len(existing_groups)))
    #
    # Now we will simulate a change to the feed and pass in our existing units
    #
    repo_src_dir = os.path.join(self.data_dir, "test_orphaned_data_final")
    feed_url = "file://%s" % (repo_src_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units)
    self.simulate_sync(repo, repo_src_dir)
    status, summary, details = ic.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEqual(summary["num_available_groups"], 2)
    self.assertEqual(summary["num_available_categories"], 1)
    self.assertEqual(summary["num_new_groups"], 0)
    self.assertEqual(summary["num_new_categories"], 0)
    self.assertEqual(summary["num_orphaned_groups"], 1)
    self.assertEqual(summary["num_orphaned_categories"], 1)
    self.assertTrue(summary["time_total_sec"] > 0)

def test_config_skip(self):
    feed_url = "http://example.redhat.com/"
    skip_content_types = ""
    config = importer_mocks.get_basic_config(feed_url=feed_url, skip=skip_content_types)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    skip_content_types = []
    config = importer_mocks.get_basic_config(feed_url=feed_url, skip=skip_content_types)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_ssl_verify(self):
    feed_url = "http://example.redhat.com/"
    ssl_verify = "fake"
    config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_verify=ssl_verify)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    ssl_verify = True
    config = importer_mocks.get_basic_config(feed_url=feed_url, ssl_verify=ssl_verify)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_proxy_url(self):
    feed_url = "http://example.redhat.com/"
    proxy_url = "fake://proxy"
    config = importer_mocks.get_basic_config(feed_url=feed_url, proxy_url=proxy_url)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    proxy_url = "http://proxy"
    config = importer_mocks.get_basic_config(feed_url=feed_url, proxy_url=proxy_url)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_verify_size(self):
    feed_url = "http://example.redhat.com/"
    verify_size = "fake_bool"
    config = importer_mocks.get_basic_config(feed_url=feed_url, verify_size=verify_size)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    verify_size = True
    config = importer_mocks.get_basic_config(feed_url=feed_url, verify_size=verify_size)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_max_speed(self):
    feed_url = "http://example.redhat.com/"
    max_speed = "fake_speed"
    config = importer_mocks.get_basic_config(feed_url=feed_url, max_speed=max_speed)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    max_speed = 100
    config = importer_mocks.get_basic_config(feed_url=feed_url, max_speed=max_speed)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_newest(self):
    feed_url = "http://example.redhat.com/"
    newest = "fake_bool"
    config = importer_mocks.get_basic_config(feed_url=feed_url, newest=newest)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    newest = True
    config = importer_mocks.get_basic_config(feed_url=feed_url, newest=newest)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_remove_old(self):
    feed_url = "http://example.redhat.com/"
    remove_old = "fake_bool"
    config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=remove_old)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    remove_old = True
    config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=remove_old)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_num_old_packages(self):
    feed_url = "http://example.redhat.com/"
    num_old_packages = "fake_int"
    config = importer_mocks.get_basic_config(feed_url=feed_url, num_old_packages=num_old_packages)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    num_old_packages = 4
    config = importer_mocks.get_basic_config(feed_url=feed_url, num_old_packages=num_old_packages)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_checksum_type(self):
    feed_url = "http://example.redhat.com/"
    checksum_type = "fake_checksum"
    config = importer_mocks.get_basic_config(feed_url=feed_url, checksum_type=checksum_type)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    checksum_type = "sha"
    config = importer_mocks.get_basic_config(feed_url=feed_url, checksum_type=checksum_type)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_config_purge_orphaned(self):
    feed_url = "http://example.redhat.com/"
    purge_orphaned = "fake_bool"
    config = importer_mocks.get_basic_config(feed_url=feed_url, purge_orphaned=purge_orphaned)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertFalse(state)

    purge_orphaned = True
    config = importer_mocks.get_basic_config(feed_url=feed_url, purge_orphaned=purge_orphaned)
    state, msg = self.importer.validate_config(self.repo, config, [])
    self.assertTrue(state)

def test_validate_config(self):
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    importer = YumImporter()
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    repo = mock.Mock(spec=Repository)
    state, msg = importer.validate_config(repo, config, [])
    self.assertTrue(state)

    # Test that an unknown argument in the config throws an error
    # and the unknown arg is identified in the message
    config = importer_mocks.get_basic_config(feed_url=feed_url, bad_unknown_arg="blah")
    state, msg = importer.validate_config(repo, config, [])
    self.assertFalse(state)
    self.assertTrue("bad_unknown_arg" in msg)

def test_bandwidth_limit(self):
    # This test assumes an available bandwidth of more than 100KB for 2 threads
    feed_url = 'http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/test_bandwidth_repo_smaller/'
    expected_size_bytes = 209888  # combined size of the 2 RPMs in this repo
    expected_num_packages = 2
    num_threads = 2
    max_speed = 25  # KB/sec
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_bandwidth_limit"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=num_threads, max_speed=max_speed)
    start = time.time()
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    end = time.time()
    self.assertTrue(status)
    self.assertEquals(summary["num_synced_new_rpms"], expected_num_packages)
    self.assertEquals(summary["num_resynced_rpms"], 0)
    self.assertEquals(summary["num_not_synced_rpms"], 0)
    self.assertEquals(summary["num_orphaned_rpms"], 0)
    self.assertEquals(details["size_total"], expected_size_bytes)
    # The sync can go no faster than num_threads * max_speed KB/sec, so the elapsed
    # time must be at least size_total / (num_threads * max_speed * 1000) seconds
    expected = (float(expected_size_bytes) / (num_threads * max_speed * 1000))
    actual_A = end - start
    self.assertTrue(actual_A > expected)
    #
    # Clean up and resync with no bandwidth limit
    # Ensure result is quicker than above
    #
    max_speed = 0
    self.clean()
    self.init()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_bandwidth_limit"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=num_threads, max_speed=max_speed)
    start = time.time()
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    end = time.time()
    self.assertTrue(status)
    self.assertEquals(summary["num_synced_new_rpms"], expected_num_packages)
    self.assertEquals(summary["num_resynced_rpms"], 0)
    self.assertEquals(summary["num_not_synced_rpms"], 0)
    self.assertEquals(summary["num_orphaned_rpms"], 0)
    self.assertEquals(details["size_total"], expected_size_bytes)

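# Worked example of the throttle floor asserted above, using only the test's own
# numbers: 209888 bytes spread across 2 threads that are each capped at 25 KB/sec
# can finish no sooner than 209888 / (2 * 25 * 1000) ~= 4.2 seconds.
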
def test_orphaned_distributions(self):
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_repo"
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    dunit_key = {}
    dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
    dunit_key['version'] = "16"
    dunit_key['arch'] = "x86_64"
    dunit_key['family'] = "TestFamily"
    dunit_key['variant'] = "TestVariant"
    metadata = {"files": [{"checksumtype": "sha256",
                           "relativepath": "images/fileA.txt",
                           "fileName": "fileA.txt",
                           "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
                           "item_type": "tree_file",
                           "savepath": "%s/testr1/images" % self.working_dir,
                           "checksum": "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
                           "filename": "fileA.txt",
                           "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                           "size": 0},
                          {"checksumtype": "sha256",
                           "relativepath": "images/fileB.txt",
                           "fileName": "fileB.txt",
                           "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
                           "item_type": "tree_file",
                           "savepath": "%s/testr1/images" % self.working_dir,
                           "checksum": "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
                           "filename": "fileB.txt",
                           "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                           "size": 0},
                          {"checksumtype": "sha256",
                           "relativepath": "images/fileC.iso",
                           "fileName": "fileC.iso",
                           "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
                           "item_type": "tree_file",
                           "savepath": "%s/testr1/images" % self.working_dir,
                           "checksum": "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
                           "filename": "fileC.iso",
                           "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                           "size": 0}]}
    distro_unit = Unit(distribution.TYPE_ID_DISTRO, dunit_key, metadata, '')
    distro_unit.storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir

    new_feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/zoo/"
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir, existing_units=[distro_unit])
    config = importer_mocks.get_basic_config(feed_url=new_feed_url)
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    print status, summary, details
    self.assertTrue(status)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertEquals(summary["num_orphaned_distributions"], 1)

def test_validate_str(self):
    parameters = {importer_constants.KEY_PROXY_PORT: '3128',
                  importer_constants.KEY_PROXY_HOST: 'http://test.com',
                  importer_constants.KEY_FEED: 'http://test.com'}
    config = importer_mocks.get_basic_config(**parameters)
    status, error_message = configuration.validate(config)
    self.assertTrue(status is True)
    self.assertEqual(error_message, None)

def test_invalid_config(self):
    config = importer_mocks.get_basic_config(**{importer_constants.KEY_MAX_SPEED: -1.0,
                                                importer_constants.KEY_FEED: 'http://test.com'})
    status, error_message = configuration.validate(config)
    self.assertTrue(status is False)
    self.assertEqual(error_message,
                     'The configuration parameter <max_speed> must be set to a positive '
                     'numerical value, but is currently set to <-1.0>.')

def test_client_key_requires_client_cert(self):
    config = importer_mocks.get_basic_config(**{importer_constants.KEY_SSL_CLIENT_KEY: 'Client Key!',
                                                importer_constants.KEY_FEED: 'http://test.com'})
    status, error_message = configuration.validate(config)
    self.assertTrue(status is False)
    self.assertEqual(error_message,
                     'The configuration parameter <ssl_client_key> requires the '
                     '<ssl_client_cert> parameter to also be set.')

def test_client_cert_is_non_string(self):
    config = importer_mocks.get_basic_config(**{importer_constants.KEY_SSL_CLIENT_CERT: 8,
                                                importer_constants.KEY_FEED: 'http://test.com'})
    status, error_message = configuration.validate(config)
    self.assertTrue(status is False)
    self.assertEqual(error_message,
                     "The configuration parameter <ssl_client_cert> should be a string, "
                     "but it was <type 'int'>.")

def test_validate(self):
    config = importer_mocks.get_basic_config(
        **{importer_constants.KEY_FEED: "http://test.com/feed",
           importer_constants.KEY_MAX_SPEED: 56.6,
           importer_constants.KEY_MAX_DOWNLOADS: 3})
    status, error_message = configuration.validate(config)
    self.assertTrue(status is True)
    self.assertEqual(error_message, None)

def test_download_succeeded_honors_validate_downloads_set_true(self, download_failed):
    """
    We have a setting that makes download validation optional. This test ensures that
    download_succeeded() honors that setting.
    """
    # In this config, we set validate_downloads to True, so the "wrong checksum" below
    # should cause validation to fail
    config = importer_mocks.get_basic_config(feed_url='http://fake.com/iso_feed/',
                                             validate_downloads=True)
    iso_sync_run = ISOSyncRun(self.sync_conduit, config)
    destination = os.path.join(self.temp_dir, 'test.txt')
    with open(destination, 'w') as test_file:
        test_file.write('Boring test data.')
    unit = 'fake_unit'
    iso = {'name': 'test.txt', 'size': 114, 'destination': destination,
           'checksum': 'wrong checksum', 'unit': unit, 'url': 'http://fake.com'}
    report = DownloadReport(iso['url'], destination)

    # Let's fake having downloaded the whole file
    iso['bytes_downloaded'] = iso['size']
    report.bytes_downloaded = iso['size']
    # We need to put this on the url_iso_map so that the iso can be retrieved for validation
    iso_sync_run._url_iso_map = {iso['url']: iso}
    iso_sync_run.progress_report.isos_state = STATE_RUNNING

    iso_sync_run.download_succeeded(report)

    # Because we fail validation, the save_unit step will not be called
    self.assertEqual(self.sync_conduit.save_unit.call_count, 0)
    # The download should be marked failed
    self.assertEqual(download_failed.call_count, 1)
    download_failed.assert_called_once_with(report)

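# download_succeeded() only rejects the file above because validation is enabled.
# A minimal sketch of the kind of check it presumably performs, assuming the iso
# dict carries the size and checksum the feed advertised (hypothetical helper;
# the real logic lives in ISOSyncRun):
def _sketch_iso_passes_validation(iso, destination):
    import hashlib
    with open(destination, 'rb') as downloaded:
        data = downloaded.read()
    # An ISO passes when both the byte count and the SHA-256 digest match.
    return len(data) == iso['size'] and hashlib.sha256(data).hexdigest() == iso['checksum']
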
def test_required_when_other_parameters_are_present(self):
    for parameters in [
            {importer_constants.KEY_MAX_SPEED: '1024'},
            {importer_constants.KEY_MAX_DOWNLOADS: 2},
            {importer_constants.KEY_PROXY_PASS: '******',
             importer_constants.KEY_PROXY_USER: '******',
             importer_constants.KEY_PROXY_HOST: 'http://test.com'},
            {importer_constants.KEY_PROXY_HOST: 'http://test.com',
             importer_constants.KEY_PROXY_PORT: '3037'},
            {importer_constants.KEY_PROXY_HOST: 'http://test.com'},
            {importer_constants.KEY_UNITS_REMOVE_MISSING: True},
            {importer_constants.KEY_SSL_CA_CERT: 'cert'},
            {importer_constants.KEY_SSL_CLIENT_CERT: 'cert'},
            {importer_constants.KEY_SSL_CLIENT_CERT: 'cert',
             importer_constants.KEY_SSL_CLIENT_KEY: 'key'},
            {importer_constants.KEY_VALIDATE: True}]:
        # Each of the above configurations should cause the validator to complain about
        # the feed_url missing
        config = importer_mocks.get_basic_config(**parameters)
        status, error_message = configuration.validate(config)
        self.assertTrue(status is False)
        self.assertEqual(
            error_message,
            'The configuration parameter <%(feed)s> is required when any of the following other '
            'parameters are defined: %(max_speed)s, %(num_threads)s, %(proxy_pass)s, %(proxy_port)s, '
            '%(proxy_host)s, %(proxy_user)s, %(remove_missing_units)s, %(ssl_ca_cert)s, '
            '%(ssl_client_cert)s, %(ssl_client_key)s, %(validate_units)s.' % {
                'feed': importer_constants.KEY_FEED,
                'max_speed': importer_constants.KEY_MAX_SPEED,
                'num_threads': importer_constants.KEY_MAX_DOWNLOADS,
                'proxy_pass': importer_constants.KEY_PROXY_PASS,
                'proxy_port': importer_constants.KEY_PROXY_PORT,
                'proxy_host': importer_constants.KEY_PROXY_HOST,
                'proxy_user': importer_constants.KEY_PROXY_USER,
                'remove_missing_units': importer_constants.KEY_UNITS_REMOVE_MISSING,
                'ssl_ca_cert': importer_constants.KEY_SSL_CA_CERT,
                'ssl_client_cert': importer_constants.KEY_SSL_CLIENT_CERT,
                'ssl_client_key': importer_constants.KEY_SSL_CLIENT_KEY,
                'validate_units': importer_constants.KEY_VALIDATE})

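# configuration.validate() returns a (status, error_message) tuple, as every test
# above asserts. A minimal sketch of one of its checks -- the max_speed rule from
# test_invalid_config -- assuming validate() simply runs a sequence of per-key
# checks like this one (the real implementation lives in the importer's
# configuration module; this helper is only an illustration):
def _sketch_validate_max_speed(config):
    max_speed = config.get(importer_constants.KEY_MAX_SPEED)
    if max_speed is None:
        return True, None
    try:
        if float(max_speed) <= 0:
            raise ValueError()
    except ValueError:
        return False, ('The configuration parameter <max_speed> must be set to a positive '
                       'numerical value, but is currently set to <%s>.' % max_speed)
    return True, None
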
def test_local_sync_with_packages_in_subdir(self):
    feed_url = "file://%s/repo_packages_in_subdirs/" % (self.data_dir)
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_local_sync_with_packages_in_subdir"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertTrue(status)
    self.assertEquals(summary["num_synced_new_rpms"], 3)
    self.assertEquals(summary["num_resynced_rpms"], 0)
    self.assertEquals(summary["num_not_synced_rpms"], 0)
    self.assertEquals(summary["num_orphaned_rpms"], 0)
    self.assertEquals(details["size_total"], 6868)
    # Confirm regular RPM files exist under self.pkg_dir
    pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
    self.assertEquals(len(pkgs), 3)
    for p in pkgs:
        self.assertTrue(os.path.isfile(p))
    # Confirm symlinks to RPMs exist under repo.working_dir
    sym_links = self.get_files_in_dir("*.rpm", repo.working_dir)
    self.assertEquals(len(sym_links), 3)
    for link in sym_links:
        self.assertTrue(os.path.islink(link))

def setUp(self):
    self.config = importer_mocks.get_basic_config(
        feed_url='http://fake.com/iso_feed/', max_speed=500.0, num_threads=5,
        ssl_client_cert="Trust me, I'm who I say I am.", ssl_client_key="Secret Key",
        ssl_ca_cert="Uh, I guess that's the right server.",
        proxy_url='http://proxy.com', proxy_port=1234, proxy_user="******",
        proxy_password='******')

    self.temp_dir = tempfile.mkdtemp()
    self.pkg_dir = os.path.join(self.temp_dir, 'content')
    os.mkdir(self.pkg_dir)

    # These checksums correspond to the checksums of the files that our curl mocks will
    # generate. Our curl mocks do not have a test4.iso, so that one is to test removal of
    # old ISOs during sync
    self.existing_units = [
        Unit(TYPE_ID_ISO,
             {'name': 'test.iso', 'size': 16,
              'checksum': 'f02d5a72cd2d57fa802840a76b44c6c6920a8b8e6b90b20e26c03876275069e0'},
             {}, '/path/test.iso'),
        Unit(TYPE_ID_ISO,
             {'name': 'test2.iso', 'size': 22,
              'checksum': 'c7fbc0e821c0871805a99584c6a384533909f68a6bbe9a2a687d28d9f3b10c16'},
             {}, '/path/test2.iso'),
        Unit(TYPE_ID_ISO, {'name': 'test4.iso', 'size': 4, 'checksum': 'sum4'},
             {}, '/path/test4.iso')]
    self.sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_ISO,
                                                        pkg_dir=self.pkg_dir,
                                                        existing_units=self.existing_units)
    self.iso_sync_run = ISOSyncRun(self.sync_conduit, self.config)

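# Nearly every test in this module builds its fixtures with
# importer_mocks.get_basic_config() and importer_mocks.get_sync_conduit(). A rough
# sketch of the conduit fixture, inferred only from how it is called in these
# tests (the real helper lives in the project's test utilities; the internals
# below are assumptions):
def _sketch_get_sync_conduit(type_id=None, existing_units=None, pkg_dir=None):
    conduit = mock.Mock()
    # get_units() hands back whatever "already in the DB" units a test supplies,
    # which is how the orphan and remove_old scenarios are simulated.
    conduit.get_units.return_value = existing_units or []
    # save_unit/remove_unit stay plain Mocks so tests can assert call counts,
    # e.g. len(conduit.save_unit.mock_calls) or conduit.remove_unit.call_count.
    return conduit
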
def test_progress_sync(self):
    global updated_progress
    updated_progress = None

    def set_progress(progress):
        global updated_progress
        updated_progress = progress

    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    importer = YumImporter()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_progress_sync"
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
    sync_conduit.set_progress = mock.Mock()
    sync_conduit.set_progress.side_effect = set_progress
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    status, summary, details = importer._sync_repo(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["packages"]["num_synced_new_rpms"], 3)

    self.assertTrue(updated_progress is not None)
    self.assertTrue("metadata" in updated_progress)
    self.assertTrue(updated_progress["metadata"].has_key("state"))
    self.assertTrue("errata" in updated_progress)
    self.assertTrue(updated_progress["errata"].has_key("state"))
    self.assertTrue("content" in updated_progress)
    self.assertTrue(updated_progress["content"].has_key("state"))
    self.assertEquals(updated_progress["content"]["state"], "FINISHED")
    for key in importer_rpm.PROGRESS_REPORT_FIELDS:
        self.assertTrue(key in updated_progress["content"])

def test_perform_sync_remove_missing_units_set_false(self, curl_multi, curl):
    # Make sure the missing ISOs don't get removed if they aren't supposed to
    config = importer_mocks.get_basic_config(
        feed_url='http://fake.com/iso_feed/', max_speed=500.0, num_threads=5,
        proxy_url='http://proxy.com', proxy_port=1234, proxy_user="******",
        proxy_password='******', remove_missing_units=False,
        ssl_client_cert="Trust me, I'm who I say I am.", ssl_client_key="Secret Key",
        ssl_ca_cert="Uh, I guess that's the right server.")
    iso_sync_run = ISOSyncRun(self.sync_conduit, config)
    repo = MagicMock(spec=Repository)
    working_dir = os.path.join(self.temp_dir, "working")
    os.mkdir(working_dir)
    repo.working_dir = working_dir

    report = iso_sync_run.perform_sync()

    # Only test3.iso is new to this repo, so exactly one Unit should have been saved
    units = [tuple(call)[1][0] for call in self.sync_conduit.save_unit.mock_calls]
    self.assertEqual(len(units), 1)
    expected_unit = {'checksum': '94f7fe923212286855dea858edac1b4a292301045af0ddb275544e5251a50b3c',
                     'size': 34, 'contents': 'Are you starting to get the idea?\n',
                     'name': 'test3.iso'}
    unit = units[0]
    self.assertEqual(unit.unit_key['checksum'], expected_unit['checksum'])
    self.assertEqual(unit.unit_key['size'], expected_unit['size'])
    expected_storage_path = os.path.join(
        self.pkg_dir, unit.unit_key['name'], unit.unit_key['checksum'],
        str(unit.unit_key['size']), unit.unit_key['name'])
    self.assertEqual(unit.storage_path, expected_storage_path)
    with open(unit.storage_path) as data:
        contents = data.read()
    self.assertEqual(contents, expected_unit['contents'])
    # There should be 0 calls to sync_conduit.remove_unit, since remove_missing_units
    # is set to False
    self.assertEqual(self.sync_conduit.remove_unit.call_count, 0)

def test_repo_scratchpad_settings(self):
    global repo_scratchpad
    repo_scratchpad = {}

    def set_repo_scratchpad(data):
        global repo_scratchpad
        repo_scratchpad = data

    def get_repo_scratchpad():
        global repo_scratchpad
        return repo_scratchpad

    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/test_drpm_repo/"
    importer = YumImporter()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_repo_scratchpad"
    sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
    sync_conduit.set_repo_scratchpad = mock.Mock()
    sync_conduit.set_repo_scratchpad.side_effect = set_repo_scratchpad
    sync_conduit.get_repo_scratchpad = mock.Mock()
    sync_conduit.get_repo_scratchpad.side_effect = get_repo_scratchpad
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importer._sync_repo(repo, sync_conduit, config)
    print "SCRATCHPAD %s" % repo_scratchpad
    self.assertEquals(repo_scratchpad['checksum_type'], 'sha256')
    self.assertTrue(repo_scratchpad.has_key("repodata"))
    self.assertTrue(repo_scratchpad["repodata"].has_key("prestodelta"))

def test_remove_packages(self):
    feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.working_dir
    repo.id = "test_remove_packages"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(status)
    self.assertEquals(summary["num_synced_new_rpms"], 3)
    self.assertEquals(len(self.get_files_in_dir("*.rpm", self.pkg_dir)), 3)
    self.assertEquals(len(self.get_files_in_dir("*.rpm", repo.working_dir)), 3)
    expected_rpms = self.get_expected_rpms_from_pulp_unittest(repo.id)
    # Confirm that both the RPM and the symlink for each expected RPM exist
    # Then run remove_unit
    # Confirm that both the RPM and the symlink have been deleted from the file system
    for rpm in expected_rpms.values():
        rpm_save_path = os.path.join(rpm["pkgpath"], rpm["filename"])
        self.assertTrue(os.path.exists(rpm_save_path))
        symlink_save_path = os.path.join(rpm["savepath"], rpm["filename"])
        self.assertTrue(os.path.lexists(symlink_save_path))
        unit = Unit(TYPE_ID_RPM,
                    importer_rpm.form_rpm_unit_key(rpm),
                    importer_rpm.form_rpm_metadata(rpm),
                    rpm_save_path)
        importer_rpm.remove_unit(sync_conduit, unit)
        self.assertFalse(os.path.exists(rpm_save_path))
        self.assertFalse(os.path.lexists(symlink_save_path))

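# The tail of the test above depends on remove_unit() deleting both the stored RPM
# and the symlink published into the working dir. A rough sketch of that behavior,
# assuming unit.storage_path points at the stored file (hypothetical helper; the
# actual implementation is importer_rpm.remove_unit):
def _sketch_remove_unit(sync_conduit, unit, symlink_path):
    # Drop the unit from the database via the conduit, then clean up the files.
    sync_conduit.remove_unit(unit)
    if os.path.exists(unit.storage_path):
        os.unlink(unit.storage_path)
    if os.path.lexists(symlink_path):
        os.unlink(symlink_path)
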