def test_publish_progress(self):
    """
    Publish a one-repo group and verify the progress reports delivered through the
    conduit's set_progress callback: every content-type section must exist, carry the
    expected bookkeeping fields, and end in the FINISHED state (https is SKIPPED since
    the config disables it).
    """
    # set_progress stores the most recent report here so the assertions below can
    # inspect the final state after publish_group returns.
    global progress_status
    progress_status = None

    def set_progress(progress):
        global progress_status
        progress_status = progress

    # Counter fields every per-type progress section is expected to carry
    PROGRESS_FIELDS = ["num_success", "num_error", "items_left", "items_total",
                       "error_details"]
    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir)
    # http publishing on, https off — asserted at the bottom of this test
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True,
                                                https=False)
    distributor = GroupISODistributor()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_progress_sync"
    repo_group = mock.Mock(spec=RepositoryGroup)
    repo_group.id = "test_group"
    repo_group.repo_ids = [repo.id]
    repo_group.working_dir = self.group_working_dir
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    distributor.publish_group(repo_group, publish_conduit, config)

    self.assertTrue(progress_status is not None)
    self.assertEqual(progress_status['group-id'], repo_group.id)
    repo_progress = progress_status['repositories'][repo.id]
    # rpms and distribution sections carry the full per-item counters
    for section in ("rpms", "distribution"):
        self.assertTrue(section in repo_progress)
        self.assertTrue("state" in repo_progress[section])
        self.assertEqual(repo_progress[section]["state"], "FINISHED")
        for field in PROGRESS_FIELDS:
            self.assertTrue(field in repo_progress[section])
    # errata only reports a state
    self.assertTrue("errata" in repo_progress)
    self.assertTrue("state" in repo_progress["errata"])
    self.assertEqual(repo_progress["errata"]["state"], "FINISHED")
    # ISO creation reports additional file/size bookkeeping on top of the common fields
    self.assertTrue("isos" in progress_status)
    self.assertTrue("state" in progress_status["isos"])
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    ISO_PROGRESS_FIELDS = PROGRESS_FIELDS + ["written_files", "current_file",
                                             "size_total", "size_left"]
    for field in ISO_PROGRESS_FIELDS:
        self.assertTrue(field in progress_status["isos"])
    # http was enabled in the config, https was not
    self.assertTrue("publish_http" in progress_status)
    self.assertEqual(progress_status["publish_http"]["state"], "FINISHED")
    self.assertTrue("publish_https" in progress_status)
    self.assertEqual(progress_status["publish_https"]["state"], "SKIPPED")
def test_group_publish_isos(self):
    """
    Sync two repositories from the unit-test feed, publish them as a group with an
    iso_prefix configured, and verify that exactly one ISO is written under the http
    publish directory (https is disabled) and that its name starts with the prefix.
    """
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo_1 = mock.Mock(spec=Repository)
    repo_1.id = "test_repo_for_export_1"
    repo_1.working_dir = self.repo_working_dir
    repo_1.checksumtype = 'sha'
    repo_2 = mock.Mock(spec=Repository)
    repo_2.id = "test_repo_for_export_2"
    repo_2.working_dir = self.repo_working_dir
    repo_2.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo_1, sync_conduit, config)
    status, summary, details = importerRPM.sync(repo_2, sync_conduit, config)

    # (name, version, sha256 checksum) for the pre-existing rpm units; release, epoch
    # and arch are identical for all three, so the unit keys and storage paths are
    # built from a single template below.
    rpm_details = [
        ('pulp-dot-2.0-test', '0.1.2',
         '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979'),
        ('pulp-test-package', '0.2.1',
         '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7'),
        ('pulp-test-package', '0.3.1',
         '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f'),
    ]
    existing_units = []
    for name, version, checksum in rpm_details:
        unit_key = {'id': '', 'name': name, 'version': version, 'release': '1.fc11',
                    'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                    'checksum': checksum, 'type_id': 'rpm'}
        unit = Unit(TYPE_ID_RPM, unit_key, {}, '')
        unit.storage_path = "%s/%s/%s/1.fc11/x86_64/%s/%s-%s-1.fc11.x86_64.rpm" % (
            self.pkg_dir, name, version, checksum, name, version)
        existing_units.append(unit)

    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    importerErrata.sync(repo_1, sync_conduit, config)
    importerErrata.sync(repo_2, sync_conduit, config)
    repo_group = mock.Mock(spec=RepositoryGroup)
    repo_group.id = "test_group"
    repo_group.repo_ids = [repo_1.id, repo_2.id]
    repo_group.working_dir = self.group_working_dir

    global progress_status
    progress_status = None

    def set_progress(progress):
        global progress_status
        progress_status = progress

    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True,
                                                https=False, iso_prefix="test-isos")
    distributor = GroupISODistributor()
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress

    # Keep the working directory contents around for the assertions below by replacing
    # the real cleanup helper with a no-op mock, and restore it afterwards so other
    # tests are unaffected. (Previously the code only set attributes on the real
    # function object, which left the real cleanup active.)
    original_cleanup = iso_util.cleanup_working_dir
    iso_util.cleanup_working_dir = mock.Mock(side_effect=lambda repo_working_dir: None)
    try:
        distributor.publish_group(repo_group, publish_conduit, config)
    finally:
        iso_util.cleanup_working_dir = original_cleanup

    self.assertTrue("isos" in progress_status)
    self.assertTrue("state" in progress_status["isos"])
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo_group.id)))
    # https publishing was disabled, so nothing should have been written there
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo_group.id))
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name uses the configured prefix
    self.assertTrue(isos_list[0].startswith("test-isos"))
class TestPublishGroup(unittest.TestCase):
    """
    Tests publish_group in GroupISODistributor
    """
    def setUp(self):
        """
        The distributor retrieves a lot of stuff from the database. It also creates and
        removes directories and files. This class does not test the functionality of the
        methods that handle any of that, so many are replaced with mocks here.
        """
        self.group_distributor = GroupISODistributor()

        # Create arguments to be handed to the distributor
        self.config_dict = {
            PUBLISH_HTTP_KEYWORD: False,
            PUBLISH_HTTPS_KEYWORD: True
        }
        self.config = PluginCallConfiguration({}, self.config_dict)
        self.mock_conduit = mock.MagicMock(spec=RepoGroupPublishConduit)
        self.repo_group = RepositoryGroup('test-group', '', '', {}, ['repo_id'], '/dir')

        # We aren't testing _publish_isos here, so let's not call it
        self.group_distributor._publish_isos = mock.MagicMock(spec=GroupISODistributor._publish_isos)

        # Since the path is all messed up, patching stuff out with mock is difficult, so this
        # mocks out all the export utilities used. The originals are saved on self first so
        # tearDown can restore them; the replacements below return "everything succeeded"
        # values so publish_group runs end to end without touching the filesystem.
        self.cleanup_working_dir = export_utils.cleanup_working_dir
        self.validate_export_config = export_utils.validate_export_config
        self.export_complete_repo = export_utils.export_complete_repo
        self.export_incremental = export_utils.export_incremental_content
        self.group_config = export_utils.retrieve_group_export_config
        self.rmtree = shutil.rmtree
        self.makedirs = os.makedirs
        export_utils.cleanup_working_dir = mock.Mock(spec=export_utils.cleanup_working_dir)
        export_utils.validate_export_config = mock.Mock(return_value=(True, None))
        export_utils.export_complete_repo = mock.Mock(return_value=({}, {'errors': []}))
        export_utils.export_incremental_content = mock.Mock(return_value=({}, {'errors': {}}))
        export_utils.retrieve_group_export_config = mock.Mock(return_value=([('repo_id', '/dir')],
                                                                            None))
        shutil.rmtree = mock.Mock(spec=shutil.rmtree)
        os.makedirs = mock.Mock(spec=os.makedirs)

    def tearDown(self):
        # Restore everything setUp monkey-patched so other test classes see the real
        # implementations.
        export_utils.cleanup_working_dir = self.cleanup_working_dir
        export_utils.validate_export_config = self.validate_export_config
        export_utils.export_complete_repo = self.export_complete_repo
        export_utils.export_incremental_content = self.export_incremental
        export_utils.retrieve_group_export_config = self.group_config
        shutil.rmtree = self.rmtree
        os.makedirs = self.makedirs

    def test_failed_override_config(self):
        """
        Tests that when invalid override configuration is given, an exception is raised.
        """
        # Setup
        export_utils.validate_export_config.return_value = (False, 'failed validation')

        # Test
        self.assertRaises(PulpDataException, self.group_distributor.publish_group,
                          self.repo_group, self.mock_conduit, self.config)

    def test_clean_working_dir(self):
        """
        Check that the working directory is cleaned before use. This is done because the
        ISOs are currently stored there
        """
        self.group_distributor.publish_group(self.repo_group, self.mock_conduit, self.config)
        shutil.rmtree.assert_called_once_with(self.repo_group.working_dir, ignore_errors=True)
        os.makedirs.assert_called_once_with(self.repo_group.working_dir)

    def test_export_iso_publish(self):
        """
        Test exporting a repository to ISO images. This happens when there is no export
        directory
        """
        # Call publish_group
        self.group_distributor.publish_group(self.repo_group, self.mock_conduit, self.config)

        # Test that _publish_isos is called with the correct arguments
        self.assertEqual(1, self.group_distributor._publish_isos.call_count)
        self.assertEqual(self.repo_group, self.group_distributor._publish_isos.call_args[0][0])
        self.assertEqual(self.config, self.group_distributor._publish_isos.call_args[0][1])

    def test_export_complete_repo_call(self):
        """
        Test that the export_complete_repo method is called with the correct arguments
        """
        self.group_distributor.publish_group(self.repo_group, self.mock_conduit, self.config)
        self.assertEqual(1, export_utils.export_complete_repo.call_count)
        # Positional args: (repo_id, working_dir, <units>, config)
        self.assertEqual('repo_id', export_utils.export_complete_repo.call_args[0][0])
        self.assertEqual('/dir', export_utils.export_complete_repo.call_args[0][1])
        self.assertEqual(self.config, export_utils.export_complete_repo.call_args[0][3])

    def test_incremental_export_call(self):
        """
        Test the the export_incremental_content method is called with the correct arguments
        """
        # Setup retrieve_group_export_config return value to return a date filter
        export_utils.retrieve_group_export_config.return_value = ([('repo_id', '/dir')], 'filter')

        # Test that export_incremental_content was called correctly
        self.group_distributor.publish_group(self.repo_group, self.mock_conduit, self.config)
        self.assertEqual(1, export_utils.export_incremental_content.call_count)
        self.assertEqual('/dir', export_utils.export_incremental_content.call_args[0][0])
        self.assertEqual('filter', export_utils.export_incremental_content.call_args[0][2])

    def test_export_dir(self):
        """
        Test that when an export directory is in the config, ISOs are not created
        """
        # Setup
        self.config_dict[EXPORT_DIRECTORY_KEYWORD] = '/export/dir'
        config = PluginCallConfiguration({}, self.config_dict)

        # Test that _publish_isos is not called
        self.group_distributor.publish_group(self.repo_group, self.mock_conduit, config)
        self.assertEqual(0, self.group_distributor._publish_isos.call_count)

    def test_failed_publish(self):
        """
        Test that when errors are reported, a failure report is generated
        """
        # Setup. Insert an error in the details
        export_utils.export_complete_repo.return_value = ({}, {'errors': ['error_list']})

        # Test
        self.group_distributor.publish_group(self.repo_group, self.mock_conduit, self.config)
        self.mock_conduit.build_failure_report.assert_called_once_with(self.group_distributor.summary,
                                                                       self.group_distributor.details)