def test_publish_repo_way_by_conditions(self, mock_get_working):
    """
    Test conditions decides to do publish with fast_forward or force_full
    """
    mock_get_working.return_value = self.temp_dir
    distributor = self.create_distributor_with_mocked_api_calls()
    extra_target = os.path.join(self.temp_dir, "target2")
    distributor.get_hosting_locations.return_value.append(extra_target)

    def _cloned_unit(name, checksum):
        # Clone the fixture unit, varying only name and checksum.
        unit = copy.deepcopy(self.unit)
        unit.unit_key['name'] = name
        unit.unit_key['checksum'] = checksum
        return unit

    # Publish a new repo with units: the fast-forward attempt is made
    # first and the publish falls back to a force-full run.
    units = [_cloned_unit("foo%d.rpm" % idx, "sum%s" % (1000000000 + idx))
             for idx in range(4)]
    new_conduit = get_publish_conduit(existing_units=units)
    distributor.publish_repo(self.repo, new_conduit,
                             PluginCallConfiguration({}, {}, {}))
    # Verify if do publish with force full finally after trying fast forward
    self.assertEqual(distributor.get_hosting_locations.call_count, 3)

    # Publish the repo again with extra units: fast forward succeeds now.
    units.extend(_cloned_unit("food%d.rpm" % idx,
                              "sumd%s" % (1000000000 + idx))
                 for idx in range(2))
    new_conduit = get_publish_conduit(existing_units=units)
    distributor.publish_repo(self.repo, new_conduit,
                             PluginCallConfiguration({}, {}, {}))
    # Verify if do publish with fast forward
    self.assertEqual(distributor.get_hosting_locations.call_count, 4)
def test_republish_after_unit_removal(self, mock_get_working):
    """
    This test checks for an issue[0] we had where publishing an ISO
    repository, removing an ISO, and then republishing would leave that
    removed ISO's symlink in the repository even though it had been removed
    from the manifest. This test asserts that the republished repository no
    longer contains the removed ISO.

    [0] https://bugzilla.redhat.com/show_bug.cgi?id=970795

    :param mock_get_working: mocked working-directory lookup (patched by a
                             decorator outside this view); its return value
                             is pointed at the test's temp dir
    :type  mock_get_working: mock.MagicMock
    """
    mock_get_working.return_value = self.temp_dir
    # Publish a repository
    distributor = self.create_distributor_with_mocked_api_calls()
    distributor.publish_repo(self.repo, self.publish_conduit, {})
    target_file = os.path.join(self.target_dir, SAMPLE_RPM)
    # test if the link was created
    self.assertTrue(os.path.islink(target_file))

    # publish a new repo with a different unit in it
    cloned_unit = copy.deepcopy(self.unit)
    cloned_unit.unit_key['name'] = 'foo.rpm'
    new_conduit = get_publish_conduit(existing_units=[cloned_unit, ])
    distributor.publish_repo(self.repo, new_conduit, {})

    # Make sure the new rpm is linked
    self.assertTrue(os.path.islink(os.path.join(self.target_dir, 'foo.rpm')))
    # Ensure the old rpm is no longer included
    self.assertFalse(os.path.islink(target_file))
def test_republish_after_unit_removal(self): """ This test checks for an issue[0] we had where publishing an ISO repository, removing an ISO, and then republishing would leave that removed ISO's symlink in the repository even though it had been removed from the manifest. This test asserts that the republished repository no longer contains the removed ISO. [0] https://bugzilla.redhat.com/show_bug.cgi?id=970795 :param delete_protected_repo: The mocked version of delete_protected_repo :type delete_protected_repo: function """ # Publish a repository distributor = self.create_distributor_with_mocked_api_calls() distributor.publish_repo(self.repo, self.publish_conduit, {}) target_file = os.path.join(self.target_dir, SAMPLE_RPM) # test if the link was created self.assertTrue(os.path.islink(target_file)) # publish a new repo with a different unit in it cloned_unit = copy.deepcopy(self.unit) cloned_unit.unit_key['name'] = 'foo.rpm' new_conduit = get_publish_conduit(existing_units=[ cloned_unit, ]) distributor.publish_repo(self.repo, new_conduit, {}) # Make sure the new rpm is linked self.assertTrue( os.path.islink(os.path.join(self.target_dir, 'foo.rpm'))) # Ensure the old rpm is no longer included self.assertFalse(os.path.islink(target_file))
def setUp(self):
    """Create a throw-away publish tree, a mocked repo and one RPM unit."""
    self.temp_dir = tempfile.mkdtemp()
    self.target_dir = os.path.join(self.temp_dir, "target")
    self.repo = MagicMock(spec=Repository)
    self.repo.id = "foo"
    self.repo.working_dir = self.temp_dir
    unit_key = {'name': SAMPLE_RPM, 'size': 1, 'checksum': 'sum1'}
    self.unit = Unit('RPM', unit_key, {},
                     os.path.join(DATA_DIR, SAMPLE_RPM))
    # Conduit that reports the single unit as already existing.
    self.publish_conduit = get_publish_conduit(existing_units=[self.unit])
def test_publish_repo_bson_doc_too_large(self, mock_get_working,
                                         force_full=False):
    """
    It verifies if too many (>50k+) files will publish with force full
    to avoid the exception[0] "BSON document too large (20946918 bytes) -
    the connected serversupports BSON document sizes up to 16777216 bytes.

    [0] https://pulp.plan.io/issues/5058
    """
    mock_get_working.return_value = self.temp_dir
    distributor = self.create_distributor_with_mocked_api_calls()
    last_published = "2019-12-05 19:40:26.284627"

    def _cloned_unit(name, checksum):
        # Clone the fixture unit, varying only name and checksum.
        unit = copy.deepcopy(self.unit)
        unit.unit_key['name'] = name
        unit.unit_key['checksum'] = checksum
        return unit

    # publish a new repo with more than 50k units in it
    units = [_cloned_unit("foo%d.rpm" % idx, "sum%s" % (1000000000 + idx))
             for idx in range(50001)]
    conduit = get_publish_conduit(existing_units=units,
                                  last_published=last_published)
    distributor.publish_repo(self.repo, conduit,
                             PluginCallConfiguration({}, {}, {}))
    # Verify if do publish with force full after trying with fast forward
    self.assertEqual(distributor.get_hosting_locations.call_count, 3)

    # A small follow-up publish goes through fast forward.
    units = [_cloned_unit("fooa%d.rpm" % idx, "suma%s" % (1000000000 + idx))
             for idx in range(5)]
    conduit = get_publish_conduit(existing_units=units,
                                  last_published=last_published)
    distributor.publish_repo(self.repo, conduit,
                             PluginCallConfiguration({}, {}, {}))
    # Verify if do publish with fast forward
    self.assertEqual(distributor.get_hosting_locations.call_count, 4)
def test_first_publish_empty_repo(self, mock_get_working, force_full=False):
    """First publish of an empty repository must still write the manifest."""
    mock_get_working.return_value = self.temp_dir
    # Publish an empty repository for the first time
    distributor = self.create_distributor_with_mocked_api_calls()
    config = PluginCallConfiguration({}, {}, {'force_full': force_full})
    empty_conduit = get_publish_conduit(existing_units=[],
                                        last_published=None)
    distributor.publish_repo(self.repo, empty_conduit, config)
    # Ensure PULP_MANIFEST is created even though no units were published
    manifest_path = os.path.join(self.target_dir, MANIFEST_FILENAME)
    self.assertTrue(os.path.exists(manifest_path))
def setUp(self):
    """Build a mocked repo/unit pair for the new-style model tests."""
    self.temp_dir = tempfile.mkdtemp()
    self.target_dir = os.path.join(self.temp_dir, "target")
    self.repo = MagicMock(spec=Repository)
    self.repo.repo_id = "foo"
    self.repo.working_dir = self.temp_dir
    self.old_repo_model = MagicMock(spec=OldRepoModel)
    self.old_repo_model.repo_obj = self.repo
    self.old_repo_model.id = "foo"
    unit_key = {'name': SAMPLE_RPM, 'checksum': 'sum1', 'size': 1}
    # NOTE(review): `name` is a reserved MagicMock constructor kwarg — it
    # names the mock rather than setting a `.name` attribute; confirm the
    # tests never read `self.unit.name` expecting SAMPLE_RPM.
    self.unit = MagicMock(unit_type_id='RPM', name=SAMPLE_RPM, size=1,
                          checksum='sum1', unit_key=unit_key)
    # NOTE(review): joining __file__ (a file path) with DATA_DIR looks odd;
    # if DATA_DIR is absolute, os.path.join discards __file__ — TODO confirm.
    self.unit._storage_path = os.path.join(__file__, DATA_DIR, SAMPLE_RPM)
    self.publish_conduit = get_publish_conduit(existing_units=[self.unit])
def setUp(self):
    """Build a mocked repo/unit pair; unit exposes a public storage_path."""
    self.temp_dir = tempfile.mkdtemp()
    self.target_dir = os.path.join(self.temp_dir, "target")
    self.repo = MagicMock(spec=Repository)
    self.repo.repo_id = "foo"
    self.repo.working_dir = self.temp_dir
    self.old_repo_model = MagicMock(spec=OldRepoModel)
    self.old_repo_model.repo_obj = self.repo
    self.old_repo_model.id = "foo"
    unit_key = {'name': SAMPLE_RPM, 'checksum': 'sum1', 'size': 1}
    # NOTE(review): `name` is a reserved MagicMock constructor kwarg — it
    # names the mock rather than setting a `.name` attribute; confirm the
    # tests never read `self.unit.name` expecting SAMPLE_RPM.
    self.unit = MagicMock(unit_type_id='RPM', name=SAMPLE_RPM, size=1,
                          checksum='sum1', unit_key=unit_key)
    # NOTE(review): joining __file__ (a file path) with DATA_DIR looks odd;
    # if DATA_DIR is absolute, os.path.join discards __file__ — TODO confirm.
    self.unit.storage_path = os.path.join(__file__, DATA_DIR, SAMPLE_RPM)
    self.publish_conduit = get_publish_conduit(existing_units=[self.unit])
def test_republish_after_unit_removal(self, mock_get_working,
                                      force_full=True):
    """
    This test checks for an issue[0] we had where publishing an ISO
    repository, removing an ISO, and then republishing would leave that
    removed ISO's symlink in the repository even though it had been removed
    from the manifest. This test asserts that the republished repository no
    longer contains the removed ISO, and that PULP_MANIFEST describes only
    the remaining unit.

    [0] https://bugzilla.redhat.com/show_bug.cgi?id=970795

    :param mock_get_working: mocked working-directory lookup (patched by a
                             decorator outside this view)
    :type  mock_get_working: mock.MagicMock
    :param force_full:       value for the 'force_full' option of the
                             first publish
    :type  force_full:       bool
    """
    mock_get_working.return_value = self.temp_dir
    # Publish a repository
    distributor = self.create_distributor_with_mocked_api_calls()
    config = PluginCallConfiguration({}, {}, {'force_full': force_full})
    distributor.publish_repo(self.repo, self.publish_conduit, config)
    target_file = os.path.join(self.target_dir, SAMPLE_RPM)
    # test if the link was created
    self.assertTrue(os.path.islink(target_file))

    # publish a new repo with a different unit in it
    cloned_unit = copy.deepcopy(self.unit)
    cloned_unit.unit_key['name'] = 'foo.rpm'
    cloned_unit.unit_key['checksum'] = 'sum2'
    new_conduit = get_publish_conduit(existing_units=[cloned_unit, ])
    distributor.publish_repo(self.repo, new_conduit,
                             PluginCallConfiguration({}, {}, {}))

    # Make sure the new rpm is linked
    self.assertTrue(
        os.path.islink(os.path.join(self.target_dir, 'foo.rpm')))
    # Ensure the old rpm is no longer included
    self.assertFalse(os.path.islink(target_file))

    # Ensure PULP_MANIFEST is updated correctly: read the manifest once and
    # check it lists exactly the one remaining unit.
    # (was reader.next(), which only exists on Python 2 — next() works on
    # both; assertEquals is a deprecated alias of assertEqual)
    with open(os.path.join(self.target_dir, MANIFEST_FILENAME), 'r') as f:
        rows = list(csv.reader(f))
    self.assertEqual(len(rows), 1)
    row = rows[0]
    self.assertEqual(row[0], cloned_unit.unit_key['name'])
    self.assertEqual(row[1], cloned_unit.unit_key['checksum'])
    self.assertEqual(row[2], str(cloned_unit.unit_key['size']))
def setUp(self):
    """Seed three ISO units and redirect publishing away from /var."""
    iso_specs = [('test.iso', 1, 'sum1'),
                 ('test2.iso', 2, 'sum2'),
                 ('test3.iso', 3, 'sum3')]
    self.existing_units = [
        Unit(ids.TYPE_ID_ISO,
             {'name': name, 'size': size, 'checksum': checksum},
             {}, '/path/%s' % name)
        for name, size, checksum in iso_specs]
    self.publish_conduit = get_publish_conduit(
        existing_units=self.existing_units)
    self.temp_dir = tempfile.mkdtemp()

    # Monkeypatch the publishing location so we don't try to write to /var
    self._original_iso_http_dir = constants.ISO_HTTP_DIR
    self._original_iso_https_dir = constants.ISO_HTTPS_DIR
    constants.ISO_HTTP_DIR = os.path.join(self.temp_dir, 'published',
                                          'http', 'isos')
    constants.ISO_HTTPS_DIR = os.path.join(self.temp_dir, 'published',
                                           'https', 'isos')
def setUp(self):
    """Seed three ISO units and redirect publishing away from /var."""
    self.existing_units = []
    for name, size, checksum in (('test.iso', 1, 'sum1'),
                                 ('test2.iso', 2, 'sum2'),
                                 ('test3.iso', 3, 'sum3')):
        self.existing_units.append(
            Unit(ids.TYPE_ID_ISO,
                 {'name': name, 'size': size, 'checksum': checksum},
                 {}, '/path/' + name))
    self.publish_conduit = get_publish_conduit(
        existing_units=self.existing_units)
    self.temp_dir = tempfile.mkdtemp()

    # Monkeypatch the publishing location so we don't try to write to /var
    self._original_iso_http_dir = constants.ISO_HTTP_DIR
    self._original_iso_https_dir = constants.ISO_HTTPS_DIR
    constants.ISO_HTTP_DIR = os.path.join(self.temp_dir, 'published',
                                          'http', 'isos')
    constants.ISO_HTTPS_DIR = os.path.join(self.temp_dir, 'published',
                                           'https', 'isos')
def test_publish_repo_unit_removal(self, mock_get_working, force_full=True):
    """Republishing with no units must empty the manifest and drop links."""
    mock_get_working.return_value = self.temp_dir
    # Publish a repository containing a single unit.
    distributor = self.create_distributor_with_mocked_api_calls()
    config = PluginCallConfiguration({}, {}, {'force_full': force_full})
    distributor.publish_repo(self.repo, self.publish_conduit, config)
    target_file = os.path.join(self.target_dir, SAMPLE_RPM)
    # test if the link was created
    self.assertTrue(os.path.islink(target_file))
    manifest_path = os.path.join(self.target_dir, MANIFEST_FILENAME)
    with open(manifest_path, 'r') as f:
        self.assertEqual(len(f.readlines()), 1)

    # Remove the unit by republishing with an empty conduit.
    empty_conduit = get_publish_conduit(existing_units=[])
    distributor.publish_repo(self.repo, empty_conduit, config)

    # Ensure PULP_MANIFEST is updated correctly and the symlink is gone.
    with open(manifest_path, 'r') as f:
        self.assertEqual(len(f.readlines()), 0)
    self.assertFalse(os.path.islink(target_file))