def test_skip_types_export(self):
    repo = mock.Mock(spec=Repository)
    repo.id = "testrepo"
    repo.working_dir = self.repo_working_dir
    distributor = ISODistributor()
    config = distributor_mocks.get_basic_config(http=True, https=False,
                                                skip=["rpm", "errata", "packagegroup"])
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    report = distributor.publish_repo(repo, publish_conduit, config)
    print report.summary
    summary_keys = {
        "rpm": ["num_package_units_attempted", "num_package_units_exported",
                "num_package_units_errors"],
        "distribution": ["num_distribution_units_attempted", "num_distribution_units_exported",
                         "num_distribution_units_errors"],
        "packagegroup": ["num_package_groups_exported", "num_package_categories_exported"],
        "erratum": ["num_errata_units_exported"],
    }
    # check rpm, packagegroup, erratum info is skipped
    for key in summary_keys["rpm"] + summary_keys["packagegroup"] + summary_keys["erratum"]:
        self.assertTrue(key not in report.summary)
    # check distro info is present and not skipped
    for key in summary_keys["distribution"]:
        self.assertTrue(key in report.summary)
def test_repo_export_isos(self):
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "pulp_unittest"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    unit_key_a = {'id': '', 'name': 'pulp-dot-2.0-test', 'version': '0.1.2', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
                  'type_id': 'rpm'}
    unit_a = Unit(TYPE_ID_RPM, unit_key_a, {}, '')
    unit_a.storage_path = "%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_b = {'id': '', 'name': 'pulp-test-package', 'version': '0.2.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
                  'type_id': 'rpm'}
    unit_b = Unit(TYPE_ID_RPM, unit_key_b, {}, '')
    unit_b.storage_path = "%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_c = {'id': '', 'name': 'pulp-test-package', 'version': '0.3.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
                  'type_id': 'rpm'}
    unit_c = Unit(TYPE_ID_RPM, unit_key_c, {}, '')
    unit_c.storage_path = "%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm" % self.pkg_dir
    existing_units = []
    for unit in [unit_a, unit_b, unit_c]:
        existing_units.append(unit)
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    importerErrata.sync(repo, sync_conduit, config)
    repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    # test https publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=False, https=True, generate_metadata=True)
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    print report
    self.assertTrue(os.path.exists("%s/%s" % (self.https_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.http_publish_dir)), 0)
    # test http publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=False)
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name defaults to repoid
    self.assertTrue(isos_list[-1].startswith(repo.id))
    # test isoprefix
    iso_prefix = "mock-iso-prefix"
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=False, iso_prefix=iso_prefix)
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 2)
    print isos_list
    # make sure the iso name uses the prefix
    self.assertTrue(isos_list[-1].startswith(iso_prefix))
def test_cancel_publish_repo(self, mock_cancel_createrepo):
    """
    Test cancel_publish_repo, which is not currently fully supported
    """
    # Setup
    distributor = ISODistributor()
    distributor.working_dir = '/working/dir'

    # Test
    distributor.cancel_publish_repo('call_request', 'call_report')
    mock_cancel_createrepo.assert_called_once_with(distributor.working_dir)
class TestPublishIsos(unittest.TestCase):
    """
    Tests the _publish_isos method in ISODistributor. This method decides what the publishing
    directories should be, cleans them up, and hands everything off to the publish_isos method
    in export_utils.
    """
    def setUp(self):
        self.distributor = ISODistributor()
        self.repo = Repository('repo_id', working_dir='/working/dir')
        self.config = {PUBLISH_HTTP_KEYWORD: True, PUBLISH_HTTPS_KEYWORD: True}
        self.publish_iso = export_utils.publish_isos
        export_utils.publish_isos = mock.Mock()

    def tearDown(self):
        export_utils.publish_isos = self.publish_iso

    @mock.patch('shutil.rmtree', autospec=True)
    def test_publish_isos(self, mock_rmtree):
        """
        Test that publish_isos is called with the expected arguments
        """
        # Setup
        http_publish_dir = os.path.join(EXPORT_HTTP_DIR, self.repo.id)
        https_publish_dir = os.path.join(EXPORT_HTTPS_DIR, self.repo.id)

        # Test
        self.distributor._publish_isos(self.repo, PluginCallConfiguration({}, self.config))
        self.assertEqual(2, mock_rmtree.call_count)
        self.assertEqual(http_publish_dir, mock_rmtree.call_args_list[0][0][0])
        self.assertEqual(https_publish_dir, mock_rmtree.call_args_list[1][0][0])
        export_utils.publish_isos.assert_called_once_with(self.repo.working_dir, self.repo.id,
                                                          http_publish_dir, https_publish_dir,
                                                          None, None)

    @mock.patch('shutil.rmtree', autospec=True)
    def test_publish_http_https_false(self, mock_rmtree):
        """
        Test that when the config has publishing over http and https set to false, publish_isos
        is called with None for http_dir and https_dir
        """
        # Setup
        self.config[PUBLISH_HTTPS_KEYWORD] = False
        self.config[PUBLISH_HTTP_KEYWORD] = False
        self.distributor._publish_isos(self.repo, PluginCallConfiguration({}, self.config))
        http_publish_dir = os.path.join(EXPORT_HTTP_DIR, self.repo.id)
        https_publish_dir = os.path.join(EXPORT_HTTPS_DIR, self.repo.id)

        # Test
        self.assertEqual(2, mock_rmtree.call_count)
        self.assertEqual(http_publish_dir, mock_rmtree.call_args_list[0][0][0])
        self.assertEqual(https_publish_dir, mock_rmtree.call_args_list[1][0][0])
        export_utils.publish_isos.assert_called_once_with(self.repo.working_dir, self.repo.id,
                                                          None, None, None, None)
def test_set_progress(self):
    """
    Test set_progress, which simply checks if the progress_callback is None before calling it
    """
    # Setup
    mock_callback = mock.Mock()
    distributor = ISODistributor()

    # Test
    distributor.set_progress('id', 'status', mock_callback)
    mock_callback.assert_called_once_with('id', 'status')
def test_set_progress_no_callback(self):
    """
    Assert that set_progress does not attempt to call the callback when it is None
    """
    # Setup
    distributor = ISODistributor()

    # Test
    try:
        distributor.set_progress('id', 'status', None)
    except AttributeError:
        self.fail('set_progress should not try to call None')
def test_validate_config_iso_prefix(self):
    repo = mock.Mock(spec=Repository)
    repo.id = "testrepo"
    distributor = ISODistributor()
    # test invalid iso prefix
    config = distributor_mocks.get_basic_config(http=True, https=False, iso_prefix="my_iso*_name_/")
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)
    # test valid iso prefix
    config = distributor_mocks.get_basic_config(http=True, https=False, iso_prefix="My_iso_name-01")
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)
def test_generate_isos(self):
    repo = mock.Mock(spec=Repository)
    repo.id = "test_repo_for_export"
    repo.working_dir = self.repo_iso_working_dir + "/" + repo.id
    unit_key_a = {'id': '', 'name': 'pulp-dot-2.0-test', 'version': '0.1.2', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
                  'type_id': 'rpm'}
    unit_a = Unit(TYPE_ID_RPM, unit_key_a, {'updated': ''}, '')
    unit_a.storage_path = "%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_b = {'id': '', 'name': 'pulp-test-package', 'version': '0.2.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
                  'type_id': 'rpm'}
    unit_b = Unit(TYPE_ID_RPM, unit_key_b, {'updated': ''}, '')
    unit_b.storage_path = "%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_c = {'id': '', 'name': 'pulp-test-package', 'version': '0.3.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
                  'type_id': 'rpm'}
    unit_c = Unit(TYPE_ID_RPM, unit_key_c, {'updated': ''}, '')
    unit_c.storage_path = "%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm" % self.pkg_dir
    existing_units = []
    for unit in [unit_a, unit_b, unit_c]:
        existing_units.append(unit)

    global progress_status
    progress_status = None

    def set_progress(progress):
        global progress_status
        progress_status = progress

    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir,
                                                            existing_units=existing_units)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False,
                                                iso_prefix="test-isos")
    distributor = ISODistributor()

    def cleanup(repo_working_dir):
        return

    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    progress_status = distributor.init_progress()
    distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue("isos" in progress_status)
    self.assertTrue(progress_status["isos"].has_key("state"))
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertEqual(progress_status["isos"]["num_success"], 1)
    self.assertTrue(progress_status["isos"]["size_total"] is not None)
    self.assertEqual(progress_status["isos"]["size_left"], 0)
    self.assertEqual(progress_status["isos"]["items_total"], 1)
    self.assertEqual(progress_status["isos"]["items_left"], 0)
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name uses the configured prefix
    self.assertTrue(isos_list[0].startswith("test-isos"))
def test_distribution_exports(self):
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "pulp_unittest"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    dunit_key = {}
    dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
    dunit_key['version'] = "16"
    dunit_key['arch'] = "x86_64"
    dunit_key['family'] = "TestFamily"
    dunit_key['variant'] = "TestVariant"
    metadata = {"files": [
        {"checksumtype": "sha256", "relativepath": "images/fileA.txt", "fileName": "fileA.txt",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.repo_working_dir,
         "checksum": "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
         "filename": "fileA.txt",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
        {"checksumtype": "sha256", "relativepath": "images/fileB.txt", "fileName": "fileB.txt",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.repo_working_dir,
         "checksum": "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
         "filename": "fileB.txt",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
        {"checksumtype": "sha256", "relativepath": "images/fileC.iso", "fileName": "fileC.iso",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.repo_working_dir,
         "checksum": "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
         "filename": "fileC.iso",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0}]}
    distro_unit = Unit(distribution.TYPE_ID_DISTRO, dunit_key, metadata, '')
    distro_unit.storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir
    symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[distro_unit],
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http=False, https=True)
    status, errors = iso_distributor._export_distributions([distro_unit], symlink_dir)
    print status, errors
    self.assertTrue(status)
    for file in metadata['files']:
        print os.path.isfile("%s/%s" % (symlink_dir, file['relativepath']))
        self.assertTrue(os.path.isfile("%s/%s" % (symlink_dir, file['relativepath'])))
def test_publish_progress(self):
    global progress_status
    progress_status = None

    def set_progress(progress):
        global progress_status
        progress_status = progress

    PROGRESS_FIELDS = ["num_success", "num_error", "items_left", "items_total", "error_details"]
    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False)
    distributor = ISODistributor()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_progress_sync"
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(progress_status is not None)
    self.assertTrue("rpms" in progress_status)
    self.assertTrue(progress_status["rpms"].has_key("state"))
    self.assertEqual(progress_status["rpms"]["state"], "FINISHED")
    for field in PROGRESS_FIELDS:
        self.assertTrue(field in progress_status["rpms"])
    self.assertTrue("distribution" in progress_status)
    self.assertTrue(progress_status["distribution"].has_key("state"))
    self.assertEqual(progress_status["distribution"]["state"], "FINISHED")
    for field in PROGRESS_FIELDS:
        self.assertTrue(field in progress_status["distribution"])
    self.assertTrue("errata" in progress_status)
    self.assertTrue(progress_status["errata"].has_key("state"))
    self.assertEqual(progress_status["errata"]["state"], "FINISHED")
    self.assertTrue("isos" in progress_status)
    self.assertTrue(progress_status["isos"].has_key("state"))
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertTrue("publish_http" in progress_status)
    self.assertEqual(progress_status["publish_http"]["state"], "FINISHED")
    self.assertTrue("publish_https" in progress_status)
    self.assertEqual(progress_status["publish_https"]["state"], "SKIPPED")
def test_generate_isos(self):
    repo = mock.Mock(spec=Repository)
    repo.id = "test_repo_for_export"
    repo.working_dir = self.repo_iso_working_dir

    global progress_status
    progress_status = None

    def set_progress(progress):
        global progress_status
        progress_status = progress

    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False,
                                                iso_prefix="test-isos")
    distributor = ISODistributor()

    def cleanup(repo_working_dir):
        return

    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    progress_status = distributor.init_progress()
    distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue("isos" in progress_status)
    self.assertTrue(progress_status["isos"].has_key("state"))
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertEqual(progress_status["isos"]["num_success"], 1)
    self.assertTrue(progress_status["isos"]["size_total"] is not None)
    self.assertEqual(progress_status["isos"]["size_left"], 0)
    self.assertEqual(progress_status["isos"]["items_total"], 1)
    self.assertEqual(progress_status["isos"]["items_left"], 0)
    print progress_status
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    print isos_list
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name uses the configured prefix
    self.assertTrue(isos_list[0].startswith("test-isos"))
def test_validate_config(self):
    """
    Test the validate_config method in ISODistributor, which just hands the config off to a
    helper method in export_utils
    """
    # Setup
    validate_config = export_utils.validate_export_config
    export_utils.validate_export_config = mock.MagicMock()
    distributor = ISODistributor()

    # Test. All validate_config should do is hand the config argument to the export_utils validator
    distributor.validate_config(None, 'config', None)
    export_utils.validate_export_config.assert_called_once_with('config')

    # Clean up
    export_utils.validate_export_config = validate_config
def test_metadata(self):
    """
    Test the overridden metadata method in ISODistributor
    """
    metadata = ISODistributor.metadata()
    expected_types = [TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM, TYPE_ID_ERRATA, TYPE_ID_DISTRO,
                      TYPE_ID_PKG_CATEGORY, TYPE_ID_PKG_GROUP]
    self.assertEquals(metadata['id'], TYPE_ID_DISTRIBUTOR_EXPORT)
    self.assertEqual(set(expected_types), set(metadata['types']))
def test_metadata(self):
    metadata = ISODistributor.metadata()
    self.assertEquals(metadata["id"], TYPE_ID_DISTRIBUTOR_ISO)
    for type_id in [TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM, TYPE_ID_ERRATA, TYPE_ID_DISTRO,
                    TYPE_ID_PKG_CATEGORY, TYPE_ID_PKG_GROUP]:
        self.assertTrue(type_id in metadata["types"])
def test_export_rpm(self):
    feed_url = "file://%s/test_repo_for_export/" % (self.data_dir)
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_repo_for_export"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertTrue(status)
    unit_key_a = {'id': '', 'name': 'pulp-dot-2.0-test', 'version': '0.1.2', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
                  'type_id': 'rpm'}
    unit_a = Unit(TYPE_ID_RPM, unit_key_a, {}, '')
    unit_a.storage_path = "%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_b = {'id': '', 'name': 'pulp-test-package', 'version': '0.2.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
                  'type_id': 'rpm'}
    unit_b = Unit(TYPE_ID_RPM, unit_key_b, {}, '')
    unit_b.storage_path = "%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_c = {'id': '', 'name': 'pulp-test-package', 'version': '0.3.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
                  'type_id': 'rpm'}
    unit_c = Unit(TYPE_ID_RPM, unit_key_c, {}, '')
    unit_c.storage_path = "%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm" % self.pkg_dir
    existing_units = []
    for unit in [unit_a, unit_b, unit_c]:
        existing_units.append(unit)
    symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http=False, https=True)
    print symlink_dir
    status, errors = iso_distributor._export_rpms(existing_units, symlink_dir)
    print status, errors
    self.assertTrue(status)
    self.assertEquals(len(os.listdir(symlink_dir)), 3)
def test_iso_export_by_date_range(self):
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = "sha"
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {}
    existing_units = []
    unit_key_a = {"id": "", "name": "patb", "version": "0.1", "release": "2", "epoch": "0",
                  "arch": "x86_64", "checksumtype": "sha",
                  "checksum": "017c12050a97cf6095892498750c2a39d2bf535e"}
    rpm_unit_a = Unit(TYPE_ID_RPM, unit_key_a, metadata, "")
    rpm_unit_a.storage_path = ("%s/patb/0.1/2/noarch/017c12050a97cf6095892498750c2a39d2bf535e/patb-0.1-2.noarch.rpm"
                               % self.pkg_dir)
    existing_units.append(rpm_unit_a)
    unit_key_b = {"id": "", "name": "emoticons", "version": "0.1", "release": "2", "epoch": "0",
                  "arch": "x86_64", "checksumtype": "sha",
                  "checksum": "663c89b0d29bfd5479d8736b716d50eed9495dbb"}
    rpm_unit_b = Unit(TYPE_ID_RPM, unit_key_b, metadata, "")
    rpm_unit_b.storage_path = ("%s/emoticons/0.1/2/noarch/663c89b0d29bfd5479d8736b716d50eed9495dbb/emoticons-0.1-2.noarch.rpm"
                               % self.pkg_dir)
    existing_units.append(rpm_unit_b)
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key["id"] = "RHEA-2010:9999"
    mdata = {"description": "test",
             "from_str": "*****@*****.**",
             "issued": "2010-03-30 08:07:30",
             "pkglist": [{"name": "RHEL Virtualization (v. 5 for 32-bit x86)",
                          "packages": [{"arch": "x86_64", "epoch": "0",
                                        "filename": "patb-0.1-2.x86_64.rpm", "name": "patb",
                                        "release": "2", "src": "",
                                        "sum": ("sha", "017c12050a97cf6095892498750c2a39d2bf535e"),
                                        "version": "0.1"},
                                       {"arch": "x86_64", "epoch": "0",
                                        "filename": "emoticons-0.1-2.x86_64.rpm",
                                        "name": "emoticons", "release": "2", "src": "",
                                        "sum": ("sha", "663c89b0d29bfd5479d8736b716d50eed9495dbb"),
                                        "version": "0.1"}],
                          "short": "rhel-i386-server-vt-5"}],
             "pushcount": 1, "reboot_suggested": False, "references": [], "release": "",
             "rights": "", "status": "final", "summary": "",
             "title": "emoticons enhancement fix and enhancement update",
             "updated": "2010-03-30 08:07:30", "version": "1", "type": "enhancement",
             "severity": "Low", "solution": ""}
    unit_key_2 = dict()
    unit_key_2["id"] = "RHEA-2008:9999"
    mdata_2 = {"description": "test",
               "from_str": "*****@*****.**",
               "issued": "2008-03-30 00:00:00",
               "pkglist": [{"name": "RHEL Virtualization (v. 5 for 32-bit x86)",
                            "packages": [{"arch": "x86_64", "epoch": "0",
                                          "filename": "patb-0.1-2.x86_64.rpm", "name": "patb",
                                          "release": "2", "src": "",
                                          "sum": ("sha", "017c12050a97cf6095892498750c2a39d2bf535e"),
                                          "version": "0.1"},
                                         {"arch": "x86_64", "epoch": "0",
                                          "filename": "emoticons-0.1-2.x86_64.rpm",
                                          "name": "emoticons", "release": "2", "src": "",
                                          "sum": ("sha", "663c89b0d29bfd5479d8736b716d50eed9495dbb"),
                                          "version": "0.1"}],
                            "short": "rhel-i386-server-vt-5"}],
               "pushcount": 1, "reboot_suggested": False, "references": [], "release": "",
               "rights": "", "status": "final", "summary": "",
               "title": "emoticons enhancement fix and enhancement update",
               "updated": "2008-03-30 00:00:00", "version": "1", "type": "enhancement",
               "severity": "Low", "solution": ""}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, ""),
                   Unit(TYPE_ID_ERRATA, unit_key_2, mdata_2, "")]
    existing_units += errata_unit
    print existing_units
    repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    # test http publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=False,
                                                start_date="2009-03-30 08:07:30",
                                                end_date="2012-03-30 08:07:30",
                                                generate_metadata=True)

    def cleanup(rpm_working_dir):
        return

    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    ftypes = util.get_repomd_filetypes("%s/%s" % (repo.working_dir, "repodata/repomd.xml"))
    self.assertTrue("updateinfo" in ftypes)
    updateinfo_path = "%s/%s" % (repo.working_dir, "updateinfo.xml")
    self.assertTrue(os.path.exists(updateinfo_path))
    elist = updateinfo.get_errata(updateinfo_path)
    self.assertEquals(len(elist), 1)
    self.assertTrue(unit_key_2["id"] not in elist[0])
    self.assertEquals(elist[0]["id"], unit_key["id"])
    self.assertEquals(elist[0]["issued"], mdata["issued"])
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
def test_errata_export(self):
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {}
    unit_key_a = {'id': '', 'name': 'patb', 'version': '0.1', 'release': '2', 'epoch': '0',
                  'arch': 'x86_64', 'checksumtype': 'md5',
                  'checksum': 'f3c197a29d9b66c5b65c5d62b25db5b4'}
    unit_key_b = {'id': '', 'name': 'emoticons', 'version': '0.1', 'release': '2', 'epoch': '0',
                  'arch': 'x86_64', 'checksumtype': 'md5',
                  'checksum': '366bb5e73a5905eacb82c96e0578f92b'}
    existing_units = []
    for unit in [unit_key_a, unit_key_b]:
        existing_units.append(Unit(TYPE_ID_RPM, unit, metadata, ''))
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key['id'] = "RHEA-2010:9999"
    mdata = {'description': "test",
             'from_str': '*****@*****.**',
             'issued': '2010-03-30 08:07:30',
             'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                          'packages': [{'arch': 'x86_64', 'epoch': '0',
                                        'filename': 'patb-0.1-2.x86_64.rpm', 'name': 'patb',
                                        'release': '2', 'src': '',
                                        'sum': ('md5', 'f3c197a29d9b66c5b65c5d62b25db5b4'),
                                        'version': '0.1'},
                                       {'arch': 'x86_64', 'epoch': '0',
                                        'filename': 'emoticons-0.1-2.x86_64.rpm',
                                        'name': 'emoticons', 'release': '2', 'src': '',
                                        'sum': ('md5', '366bb5e73a5905eacb82c96e0578f92b'),
                                        'version': '0.1'}],
                          'short': 'rhel-i386-server-vt-5'}],
             'pushcount': 1, 'reboot_suggested': False, 'references': [], 'release': '',
             'rights': '', 'status': 'final', 'summary': '',
             'title': 'emoticons enhancement fix and enhancement update',
             'updated': '2010-03-30 08:07:30', 'version': '1', 'type': 'enhancement',
             'severity': 'Low', 'solution': ''}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, '')]
    symlink_dir = "%s/%s" % (self.repo_working_dir, repo.id)
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http=False, https=True)
    print symlink_dir
    rpm_units = iso_distributor._get_errata_rpms(errata_unit, existing_units)
    print "RPMS in ERRATA", rpm_units
    iso_distributor._export_rpms(rpm_units, self.repo_working_dir)
    status, errors = iso_distributor._export_errata(errata_unit, symlink_dir)
    self.assertTrue(os.path.exists("%s/%s" % (symlink_dir, "updateinfo.xml")))
    self.assertTrue(status)
    ftypes = util.get_repomd_filetypes("%s/%s" % (symlink_dir, "repodata/repomd.xml"))
    print ftypes
    self.assertTrue("updateinfo" in ftypes)
def test_iso_export_by_date_range(self):
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {'updated': '2010-03-30 08:07:30'}
    existing_units = []
    unit_key_a = {'id': '', 'name': 'patb', 'version': '0.1', 'release': '2', 'epoch': '0',
                  'arch': 'x86_64', 'checksumtype': 'sha',
                  'checksum': '017c12050a97cf6095892498750c2a39d2bf535e'}
    rpm_unit_a = Unit(TYPE_ID_RPM, unit_key_a, metadata, '')
    rpm_unit_a.storage_path = "%s/patb/0.1/2/noarch/017c12050a97cf6095892498750c2a39d2bf535e/patb-0.1-2.noarch.rpm" % self.pkg_dir
    existing_units.append(rpm_unit_a)
    unit_key_b = {'id': '', 'name': 'emoticons', 'version': '0.1', 'release': '2', 'epoch': '0',
                  'arch': 'x86_64', 'checksumtype': 'sha',
                  'checksum': '663c89b0d29bfd5479d8736b716d50eed9495dbb'}
    rpm_unit_b = Unit(TYPE_ID_RPM, unit_key_b, metadata, '')
    rpm_unit_b.storage_path = "%s/emoticons/0.1/2/noarch/663c89b0d29bfd5479d8736b716d50eed9495dbb/emoticons-0.1-2.noarch.rpm" % self.pkg_dir
    existing_units.append(rpm_unit_b)
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key['id'] = "RHEA-2010:9999"
    mdata = {'description': "test",
             'from_str': '*****@*****.**',
             'issued': '2010-03-30 08:07:30',
             'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                          'packages': [{'arch': 'x86_64', 'epoch': '0',
                                        'filename': 'patb-0.1-2.x86_64.rpm', 'name': 'patb',
                                        'release': '2', 'src': '',
                                        'sum': ('sha', '017c12050a97cf6095892498750c2a39d2bf535e'),
                                        'version': '0.1'},
                                       {'arch': 'x86_64', 'epoch': '0',
                                        'filename': 'emoticons-0.1-2.x86_64.rpm',
                                        'name': 'emoticons', 'release': '2', 'src': '',
                                        'sum': ('sha', '663c89b0d29bfd5479d8736b716d50eed9495dbb'),
                                        'version': '0.1'}],
                          'short': 'rhel-i386-server-vt-5'}],
             'pushcount': 1, 'reboot_suggested': False, 'references': [], 'release': '',
             'rights': '', 'status': 'final', 'summary': '',
             'title': 'emoticons enhancement fix and enhancement update',
             'updated': '2010-03-30 08:07:30', 'version': '1', 'type': 'enhancement',
             'severity': 'Low', 'solution': ''}
    unit_key_2 = dict()
    unit_key_2['id'] = "RHEA-2008:9999"
    mdata_2 = {'description': "test",
               'from_str': '*****@*****.**',
               'issued': '2008-03-30 00:00:00',
               'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                            'packages': [{'arch': 'x86_64', 'epoch': '0',
                                          'filename': 'patb-0.1-2.x86_64.rpm', 'name': 'patb',
                                          'release': '2', 'src': '',
                                          'sum': ('sha', '017c12050a97cf6095892498750c2a39d2bf535e'),
                                          'version': '0.1'},
                                         {'arch': 'x86_64', 'epoch': '0',
                                          'filename': 'emoticons-0.1-2.x86_64.rpm',
                                          'name': 'emoticons', 'release': '2', 'src': '',
                                          'sum': ('sha', '663c89b0d29bfd5479d8736b716d50eed9495dbb'),
                                          'version': '0.1'}],
                            'short': 'rhel-i386-server-vt-5'}],
               'pushcount': 1, 'reboot_suggested': False, 'references': [], 'release': '',
               'rights': '', 'status': 'final', 'summary': '',
               'title': 'emoticons enhancement fix and enhancement update',
               'updated': '2008-03-30 00:00:00', 'version': '1', 'type': 'enhancement',
               'severity': 'Low', 'solution': ''}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, ''),
                   Unit(TYPE_ID_ERRATA, unit_key_2, mdata_2, '')]
    existing_units += errata_unit
    print existing_units
    repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    # test http publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=False,
                                                start_date="2009-03-30 08:07:30",
                                                end_date="2012-03-30 08:07:30",
                                                generate_metadata=True)

    def cleanup(rpm_working_dir):
        return

    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    ftypes = util.get_repomd_filetypes("%s/%s" % (repo.working_dir, "repodata/repomd.xml"))
    self.assertTrue("updateinfo" in ftypes)
    updateinfo_path = util.get_repomd_filetype_path(
        "%s/%s" % (repo.working_dir, "repodata/repomd.xml"), "updateinfo")
    updateinfo_path = os.path.join(repo.working_dir, updateinfo_path)
    self.assertTrue(os.path.exists(updateinfo_path))
    elist = updateinfo.get_errata(updateinfo_path)
    self.assertEquals(len(elist), 1)
    self.assertTrue(unit_key_2['id'] not in elist[0])
    self.assertEquals(elist[0]['id'], unit_key['id'])
    self.assertEquals(elist[0]['issued'], mdata['issued'])
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
def test_errata_export(self):
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = "sha"
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {}
    unit_key_a = {"id": "", "name": "patb", "version": "0.1", "release": "2", "epoch": "0",
                  "arch": "x86_64", "checksumtype": "md5",
                  "checksum": "f3c197a29d9b66c5b65c5d62b25db5b4"}
    unit_key_b = {"id": "", "name": "emoticons", "version": "0.1", "release": "2", "epoch": "0",
                  "arch": "x86_64", "checksumtype": "md5",
                  "checksum": "366bb5e73a5905eacb82c96e0578f92b"}
    existing_units = []
    for unit in [unit_key_a, unit_key_b]:
        existing_units.append(Unit(TYPE_ID_RPM, unit, metadata, ""))
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key["id"] = "RHEA-2010:9999"
    mdata = {"description": "test",
             "from_str": "*****@*****.**",
             "issued": "2010-03-30 08:07:30",
             "pkglist": [{"name": "RHEL Virtualization (v. 5 for 32-bit x86)",
                          "packages": [{"arch": "x86_64", "epoch": "0",
                                        "filename": "patb-0.1-2.x86_64.rpm", "name": "patb",
                                        "release": "2", "src": "",
                                        "sum": ("md5", "f3c197a29d9b66c5b65c5d62b25db5b4"),
                                        "version": "0.1"},
                                       {"arch": "x86_64", "epoch": "0",
                                        "filename": "emoticons-0.1-2.x86_64.rpm",
                                        "name": "emoticons", "release": "2", "src": "",
                                        "sum": ("md5", "366bb5e73a5905eacb82c96e0578f92b"),
                                        "version": "0.1"}],
                          "short": "rhel-i386-server-vt-5"}],
             "pushcount": 1, "reboot_suggested": False, "references": [], "release": "",
             "rights": "", "status": "final", "summary": "",
             "title": "emoticons enhancement fix and enhancement update",
             "updated": "2010-03-30 08:07:30", "version": "1", "type": "enhancement",
             "severity": "Low", "solution": ""}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, "")]
    symlink_dir = "%s/%s" % (self.repo_working_dir, repo.id)
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http=False, https=True)
    print symlink_dir
    rpm_units = iso_distributor._get_errata_rpms(errata_unit, existing_units)
    print "RPMS in ERRATA", rpm_units
    iso_distributor._export_rpms(rpm_units, self.repo_working_dir)
    status, errors = iso_distributor._export_errata(errata_unit, symlink_dir)
    self.assertTrue(os.path.exists("%s/%s" % (symlink_dir, "updateinfo.xml")))
    self.assertTrue(status)
    ftypes = util.get_repomd_filetypes("%s/%s" % (symlink_dir, "repodata/repomd.xml"))
    print ftypes
    self.assertTrue("updateinfo" in ftypes)
def test_validate_config(self):
    distributor = ISODistributor()
    repo = mock.Mock(spec=Repository)
    repo.id = "testrepo"
    http = "true"
    https = False
    config = distributor_mocks.get_basic_config(http=http, https=https)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    http = True
    config = distributor_mocks.get_basic_config(http=http, https=https)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)

    http = True
    https = "False"
    relative_url = "test_path"
    config = distributor_mocks.get_basic_config(http=http, https=https)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    https = True
    config = distributor_mocks.get_basic_config(http=http, https=https)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)

    http = True
    https = False
    relative_url = "test_path"
    skip_content_types = "fake"
    config = distributor_mocks.get_basic_config(http=http, https=https, skip=skip_content_types)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    skip_content_types = []
    config = distributor_mocks.get_basic_config(http=http, https=https, skip=skip_content_types)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)

    # test invalid iso prefix
    config = distributor_mocks.get_basic_config(http=True, https=False, iso_prefix="my_iso*_name_/")
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    # test valid iso prefix
    config = distributor_mocks.get_basic_config(http=True, https=False, iso_prefix="My_iso_name-01")
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)

    invalid_config = "dummy"
    config = distributor_mocks.get_basic_config(invalid_config)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    http_publish_dir = self.http_publish_dir
    config = distributor_mocks.get_basic_config(http=http, https=https,
                                                http_publish_dir=http_publish_dir)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)

    http_publish_dir = "test"
    config = distributor_mocks.get_basic_config(http=http, https=https,
                                                http_publish_dir=http_publish_dir)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    https_publish_dir = self.https_publish_dir
    config = distributor_mocks.get_basic_config(http=http, https=https,
                                                https_publish_dir=https_publish_dir)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)

    https_publish_dir = "test"
    config = distributor_mocks.get_basic_config(http=http, https=https,
                                                https_publish_dir=https_publish_dir)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    if not M2CRYPTO_HAS_CRL_SUPPORT:
        return

    http = True
    https = False
    relative_url = "test_path"
    auth_cert = "fake"
    config = distributor_mocks.get_basic_config(http=http, https=https, https_ca=auth_cert)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertFalse(state)

    auth_cert = open(os.path.join(self.data_dir, "cert.crt")).read()
    config = distributor_mocks.get_basic_config(http=http, https=https, https_ca=auth_cert)
    state, msg = distributor.validate_config(repo, config, [])
    self.assertTrue(state)
class TestPublishRepo(unittest.TestCase):
    """
    Tests publish_repo in ISODistributor
    """
    def setUp(self):
        self.config_dict = {
            PUBLISH_HTTP_KEYWORD: False,
            PUBLISH_HTTPS_KEYWORD: True,
        }

        # Set up the distributor
        self.distributor = ISODistributor()
        self.distributor._publish_isos = mock.Mock(spec=ISODistributor._publish_isos)

        # Arguments for the distributor
        self.repo = Repository(id='repo-id', working_dir='/working/dir')
        self.mock_conduit = mock.Mock(spec=RepoPublishConduit)
        self.config = PluginCallConfiguration({}, self.config_dict)

        # It's difficult to mock patch the export_utils, so do it here.
        self.cleanup_working_dir = export_utils.cleanup_working_dir
        self.validate_export_config = export_utils.validate_export_config
        self.export_complete_repo = export_utils.export_complete_repo
        self.export_incremental = export_utils.export_incremental_content
        self.retrieve_repo_config = export_utils.retrieve_repo_config
        self.rmtree = shutil.rmtree
        self.makedirs = os.makedirs
        export_utils.cleanup_working_dir = mock.Mock(spec=export_utils.cleanup_working_dir)
        export_utils.validate_export_config = mock.Mock(return_value=(True, None))
        export_utils.export_complete_repo = mock.Mock(return_value=({}, {'errors': []}))
        export_utils.export_incremental_content = mock.Mock(return_value=({}, {'errors': ()}))
        export_utils.retrieve_repo_config = mock.Mock(return_value=('/working/dir', None))
        shutil.rmtree = mock.Mock(spec=shutil.rmtree)
        os.makedirs = mock.Mock(spec=os.makedirs)

    def tearDown(self):
        export_utils.cleanup_working_dir = self.cleanup_working_dir
        export_utils.validate_export_config = self.validate_export_config
        export_utils.export_complete_repo = self.export_complete_repo
        export_utils.export_incremental_content = self.export_incremental
        export_utils.retrieve_repo_config = self.retrieve_repo_config
        shutil.rmtree = self.rmtree
        os.makedirs = self.makedirs

    def test_failed_override_config(self):
        """
        Tests that when invalid override configuration is given, an exception is raised.
        """
        # Setup
        export_utils.validate_export_config.return_value = (False, 'failed validation')

        # Test
        self.assertRaises(PulpDataException, self.distributor.publish_repo, self.repo,
                          self.mock_conduit, self.config)

    def test_working_dir_cleanup(self):
        """
        Check that the working directory is cleaned before use. This is done because the ISOs
        are currently stored there.
        """
        self.distributor.publish_repo(self.repo, self.mock_conduit, self.config)
        shutil.rmtree.assert_called_once_with(self.repo.working_dir, ignore_errors=True)
        os.makedirs.assert_called_once_with(self.repo.working_dir)

    def test_export_with_export_dir(self):
        """
        Test that _publish_isos isn't called when there is an export directory in the config,
        and that the correct working directory is used.
        """
        # Set the config to have an export directory
        self.config_dict[EXPORT_DIRECTORY_KEYWORD] = '/my/export/dir'
        config = PluginCallConfiguration({}, self.config_dict)

        # Test
        self.distributor.publish_repo(self.repo, self.mock_conduit, config)
        self.assertEqual(0, self.distributor._publish_isos.call_count)
        self.assertEqual(1, self.mock_conduit.build_success_report.call_count)

    def test_export_iso_publish(self):
        """
        Test that _publish_isos gets called when an export dir isn't in the config
        """
        self.distributor.publish_repo(self.repo, self.mock_conduit, self.config)
        self.assertEqual(1, self.distributor._publish_isos.call_count)
        self.assertEqual(self.repo, self.distributor._publish_isos.call_args[0][0])
        self.assertEqual(self.config, self.distributor._publish_isos.call_args[0][1])
        self.assertEqual(1, self.mock_conduit.build_success_report.call_count)

    def test_export_complete_repo(self):
        """
        Test that when a date filter doesn't exist, export_complete_repo is called
        """
        self.distributor.publish_repo(self.repo, self.mock_conduit, self.config)
        self.assertEqual(1, export_utils.export_complete_repo.call_count)
        self.assertEqual('repo-id', export_utils.export_complete_repo.call_args[0][0])
        self.assertEqual('/working/dir', export_utils.export_complete_repo.call_args[0][1])
        self.assertEqual(self.config, export_utils.export_complete_repo.call_args[0][3])

    def test_export_incremental(self):
        """
        Test that when a date filter is not None, export_incremental_content is called
        """
        # Setup
        export_utils.retrieve_repo_config.return_value = ('/working/dir', 'filter')

        # Test
        self.distributor.publish_repo(self.repo, self.mock_conduit, self.config)
        self.assertEqual(1, export_utils.export_incremental_content.call_count)
        self.assertEqual('/working/dir', export_utils.export_incremental_content.call_args[0][0])
        self.assertEqual('filter', export_utils.export_incremental_content.call_args[0][2])

    def test_failed_publish(self):
        """
        Confirm that when the details dict contains errors, a failure report is generated
        """
        # Setup
        self.distributor.details['errors'] = ['critical_error_thingy']
        export_utils.export_complete_repo.return_value = ({}, {'errors': ['thousands of them']})

        # Test
        self.distributor.publish_repo(self.repo, self.mock_conduit, self.config)
        self.assertEqual(1, self.mock_conduit.build_failure_report.call_count)