def setUp(self):
    super(Migration0004Tests, self).setUp()
    # Special way to import modules that start with a number
    self.migration = _import_all_the_way(
        'pulp_rpm.plugins.migrations.0004_pkg_group_category_repoid')
    factory.initialize()
    api.initialize(False)
    types_db.update_database([TYPE_DEF_GROUP, TYPE_DEF_CATEGORY])

    # Create the repositories necessary for the tests
    self.source_repo_id = 'source-repo'  # where units were copied from with the bad code
    self.dest_repo_id = 'dest-repo'  # where bad units were copied to
    source_repo = model.Repository(repo_id=self.source_repo_id)
    source_repo.save()
    dest_repo = model.Repository(repo_id=self.dest_repo_id)
    dest_repo.save()
    source_importer = model.Importer(self.source_repo_id, 'yum_importer', {})
    source_importer.save()
    dest_importer = model.Importer(self.dest_repo_id, 'yum_importer', {})
    dest_importer.save()
def test_create_i18n(self):
    """
    Test use of international text for fields that are not repo_id.
    """
    i18n_text = 'Brasília'
    repo_obj = model.Repository('limited_characters', i18n_text, i18n_text)
    repo_obj.validate()
def test_update_from_delta_skips_prohibited_fields(self):
    """
    Attempt to update a prohibited field. Make sure it is ignored.
    """
    repo_obj = model.Repository('mock_repo')
    repo_obj.update_from_delta({'repo_id': 'id_updated'})
    self.assertEqual(repo_obj.repo_id, 'mock_repo')
def test_get_remove_unit(self):
    """
    Tests retrieving units through the conduit and removing them.
    """
    model.Repository(repo_id='repo-1').save()
    unit_1_key = {'key-1': 'unit_1'}
    unit_1_metadata = {'meta_1': 'value_1'}
    unit_1 = self.conduit.init_unit(TYPE_1_DEF.id, unit_1_key, unit_1_metadata, '/foo/bar')
    self.conduit.save_unit(unit_1)

    # Test - get_units
    units = self.conduit.get_units()

    # Verify returned units
    self.assertEqual(1, len(units))
    self.assertEqual(unit_1_key, units[0].unit_key)
    self.assertTrue(units[0].id is not None)

    # Test - remove_units
    self.conduit.remove_unit(units[0])

    # Verify repo association removed in the database
    associated_units = list(RepoContentUnit.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(0, len(associated_units))

    # Verify the unit itself is still in the database
    db_unit = self.query_manager.get_content_unit_by_id(TYPE_1_DEF.id, unit_1.id)
    self.assertTrue(db_unit is not None)
def setUp(self):
    super(TestSyncRepo, self).setUp()
    self.repo = Repository('repo1', working_dir='/a/b/c')
    self.repo.repo_obj = model.Repository(repo_id=self.repo.id)
    self.sync_conduit = mock.MagicMock()
    self.config = mock.MagicMock()
    self.importer = DockerImporter()
def test_migration_duplicate_unit(self, mock_modules, mock_dist, mock_association,
                                  mock_controller, mock_repo, mock_open):
    module_foo = Module(name='kung-foo', version='0.1.2', author='kung')
    module_bar = Module(name='foo', version='0.1.2', author='kung')
    module_bar.first = Mock()
    mock_modules.filter.side_effect = ([module_foo], module_bar)
    module_foo.save = Mock()
    module_foo.save.side_effect = NotUniqueError()
    repo_association = model.RepositoryContentUnit(
        repo_id='test_repo', unit_type_id='puppet_module', unit_id='bar')
    test_repo = model.Repository(repo_id='test_repo')
    mock_repo.get_repo_or_missing_resource.return_value = test_repo
    mock_association.filter.return_value = [repo_association]

    migration.migrate()

    module_foo.save.assert_called_once_with()
    mock_association.filter.assert_called_once_with(unit_id=module_foo.id)
    mock_modules.filter.assert_called_with(name='foo')
    mock_controller.disassociate_units.assert_called_once_with(repo_association, [module_foo])
    mock_repo.get_repo_or_missing_resource.assert_called_once_with('test_repo')
    mock_controller.rebuild_content_unit_counts.assert_called_once_with(test_repo)
def test_invalid_repo_id(self):
    """
    Ensure that validation raises as expected when invalid characters are present.
    """
    repo_obj = model.Repository('invalid_char%')
    self.assertRaises(ValidationError, repo_obj.validate)
def test_to_transfer_repo(self):
    """
    Test changing a repository object into a transfer unit for plugins.
    """
    dt = dateutils.now_utc_datetime_with_tzinfo()
    data = {
        'repo_id': 'foo',
        'display_name': 'bar',
        'description': 'baz',
        'notes': 'qux',
        'content_unit_counts': {'units': 1},
        'last_unit_added': dt,
        'last_unit_removed': dt
    }
    repo_obj = model.Repository(**data)
    repo = repo_obj.to_transfer_repo()

    self.assertEquals('foo', repo.id)
    self.assertFalse(hasattr(repo, 'repo_id'))
    self.assertEquals('bar', repo.display_name)
    self.assertEquals('baz', repo.description)
    self.assertEquals('qux', repo.notes)
    self.assertEquals({'units': 1}, repo.content_unit_counts)
    self.assertEquals(dt, repo.last_unit_added)
    self.assertEquals(dt, repo.last_unit_removed)
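The assertions above pin down to_transfer_repo's field mapping: repo_id is exposed as id on the transfer object and the remaining fields carry over by name. A minimal sketch of that mapping, assuming a bare attribute-container transfer class (illustrative only, not the actual model code):

# Illustrative sketch of the repo_id -> id renaming verified above; the
# TransferRepo class and field list here are assumptions, not the real code.
class TransferRepo(object):
    pass


def to_transfer_repo(repo_obj):
    transfer = TransferRepo()
    transfer.id = repo_obj.repo_id  # plugins see 'id', never 'repo_id'
    for field in ('display_name', 'description', 'notes', 'content_unit_counts',
                  'last_unit_added', 'last_unit_removed'):
        setattr(transfer, field, getattr(repo_obj, field))
    return transfer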
def create_repo(repo_id, display_name=None, description=None, notes=None, importer_type_id=None,
                importer_repo_plugin_config=None, distributor_list=[]):
    """
    Create a repository and add importers and distributors if they are specified. If there are
    any issues adding any of the importers or distributors, the repo will be deleted and the
    exceptions will be reraised.

    Multiple distributors can be created in this call. Each distributor is specified as a dict
    with the following keys:

        distributor_type - ID of the type of distributor being added
        distributor_config - values sent to the distributor when used by this repository
        auto_publish - boolean indicating if the distributor should automatically publish with
                       every sync; defaults to False
        distributor_id - used to refer to the distributor later; if omitted, one will be
                         generated

    :param repo_id: unique identifier for the repo
    :type  repo_id: str
    :param display_name: user-friendly name for the repo
    :type  display_name: str
    :param description: user-friendly text describing the repo's contents
    :type  description: str
    :param notes: key-value pairs to programmatically tag the repo
    :type  notes: dict
    :param importer_type_id: if specified, an importer with this type ID will be added to the
                             repo
    :type  importer_type_id: str
    :param importer_repo_plugin_config: configuration values for the importer, may be None
    :type  importer_repo_plugin_config: dict
    :param distributor_list: iterable of distributor dicts to add; more details above
    :type  distributor_list: list or tuple

    :raises DuplicateResource: if there is already a repo with the requested ID
    :raises InvalidValue: if any of the fields are invalid

    :return: created repository object
    :rtype:  pulp.server.db.model.Repository
    """
    # Prevalidation.
    if not isinstance(distributor_list, (list, tuple)):
        raise pulp_exceptions.InvalidValue(['distributor_list'])

    # Note: the repo must be saved before the importer and distributor managers can be called
    # because the first thing that they do is validate that the repo exists.
    repo = model.Repository(repo_id=repo_id, display_name=display_name,
                            description=description, notes=notes)
    try:
        repo.save()
    except NotUniqueError:
        raise pulp_exceptions.DuplicateResource(repo_id)
    except ValidationError, e:
        raise pulp_exceptions.InvalidValue(e.to_dict().keys())
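A minimal usage sketch for create_repo with a single distributor, following the distributor dict keys documented in the docstring above; the type IDs, config values, and repo names here are illustrative placeholders, not guaranteed plugin identifiers:

# Hypothetical call to create_repo; the importer/distributor type IDs and
# config values below are illustrative placeholders, not project fixtures.
distributors = [{
    'distributor_type': 'yum_distributor',       # type ID of the distributor being added
    'distributor_config': {'http': True},        # values sent to the distributor
    'auto_publish': True,                        # publish automatically after every sync
    'distributor_id': 'my_yum_distributor',      # optional; generated when omitted
}]
repo = create_repo(
    'example-repo',
    display_name='Example Repo',
    notes={'team': 'content'},
    importer_type_id='yum_importer',
    importer_repo_plugin_config={'feed': 'https://example.com/repo/'},
    distributor_list=distributors)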
def test_update_from_delta(self):
    """
    Update repository information from a delta dictionary.
    """
    repo_obj = model.Repository('mock_repo')
    repo_obj.update_from_delta({'display_name': 'dn_updated', 'description': 'd_update'})
    self.assertEqual(repo_obj.display_name, 'dn_updated')
    self.assertEqual(repo_obj.description, 'd_update')
def populate(self):
    # make content/ dir.
    os.makedirs(os.path.join(self.parentfs, 'content'))
    repository = model.Repository()
    repository.repo_id = self.REPO_ID
    repository.save()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        units = self.add_units(0, self.NUM_UNITS)
        self.units = units
def test_total_packages_in_repo_empty_repo(self):
    """
    In case the repo is empty and has no data in content_unit_counts,
    make sure this returns 0.
    """
    repo = model.Repository(repo_id='foo')
    step = publish.PublishRpmStep(mock.Mock(package_dir=None))
    step.repo = repo.to_transfer_repo()

    total = step.total_packages_in_repo

    self.assertEqual(total, 0)
def test_queued(self, m_tags, m_set):
    """
    Test that the set_importer task is queued correctly.
    """
    repo = model.Repository('m_id')
    result = importer.queue_set_importer(repo, 'm_type', 'm_conf')

    m_task_tags = [m_tags.resource_tag.return_value, m_tags.action_tag.return_value]
    m_set.apply_async_with_reservation.assert_called_once_with(
        m_tags.RESOURCE_REPOSITORY_TYPE, 'm_id', ['m_id', 'm_type'],
        {'repo_plugin_config': 'm_conf'}, tags=m_task_tags)
    self.assertTrue(result is m_set.apply_async_with_reservation.return_value)
def test_total_packages_in_repo(self):
    """
    The "total_packages_in_repo" property should calculate a number based
    on content unit counts on the repo.
    """
    repo = model.Repository(repo_id='foo')
    repo.content_unit_counts = {'rpm': 2, 'srpm': 3}
    step = publish.PublishRpmStep(mock.Mock(package_dir=None))
    step.repo = repo.to_transfer_repo()

    total = step.total_packages_in_repo

    self.assertEqual(total, 5)
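Taken together with test_total_packages_in_repo_empty_repo above, these tests suggest the property simply sums the package-type entries of content_unit_counts, with an empty mapping yielding 0. A minimal sketch under that assumption; the exact set of counted types is not confirmed by the tests:

# Illustrative sketch of the computation the two tests above exercise;
# PACKAGE_TYPES is an assumption, not taken from the real PublishRpmStep.
PACKAGE_TYPES = ('rpm', 'srpm')


def total_packages_in_repo(transfer_repo):
    counts = transfer_repo.content_unit_counts or {}
    return sum(counts.get(package_type, 0) for package_type in PACKAGE_TYPES)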
def test_update_from_delta_notes(self):
    """
    Test the update of notes. Make sure new fields are added, fields changed to `None` are
    removed, and existing fields are modified.
    """
    repo_obj = model.Repository('mock_repo', notes={'remove': 1, 'leave': 2, 'modify': 3})
    repo_obj.update_from_delta({'notes': {'remove': None, 'modify': 4, 'add': 5}})
    self.assertEqual(repo_obj.repo_id, 'mock_repo')
    self.assertFalse('remove' in repo_obj.notes)
    self.assertEqual(repo_obj.notes['leave'], 2)
    self.assertEqual(repo_obj.notes['modify'], 4)
    self.assertEqual(repo_obj.notes['add'], 5)
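A minimal sketch of the notes-merge behavior this test verifies: a None value in the delta removes the note, any other value adds or overwrites it, and untouched notes are left alone. This is an illustrative helper, not the actual Repository.update_from_delta implementation:

# Illustrative helper mirroring the note-merge semantics asserted above;
# not the real Repository.update_from_delta code.
def merge_notes(existing_notes, delta_notes):
    merged = dict(existing_notes)
    for key, value in delta_notes.items():
        if value is None:
            merged.pop(key, None)  # a None value removes the note entirely
        else:
            merged[key] = value    # new keys are added, existing ones overwritten
    return merged

# merge_notes({'remove': 1, 'leave': 2, 'modify': 3},
#             {'remove': None, 'modify': 4, 'add': 5})
# -> {'leave': 2, 'modify': 4, 'add': 5}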
def test_init_with_date_and_export_dir(self, mock_export_utils, mock_repo_qs, m_wd):
    mock_export_utils.return_value = 'foo'
    export_dir = 'flux'
    config = PluginCallConfiguration(None, {constants.EXPORT_DIRECTORY_KEYWORD: export_dir})
    repo_group = mock.Mock(repo_ids=['foo', 'bar'], working_dir=self.working_dir)
    foo = model.Repository(repo_id='foo', display_name='foo', description='description',
                           notes={'_repo-type': 'rpm-repo'}, content_unit_counts={'rpm': 1})
    bar = model.Repository(repo_id='bar', display_name='bar', description='description',
                           notes={'_repo-type': 'puppet'},
                           content_unit_counts={'puppet-module': 1})
    mock_repo_qs.return_value = [foo, bar]

    step = publish.ExportRepoGroupPublisher(repo_group, self.publisher.get_conduit(),
                                            config, EXPORT_DISTRIBUTOR_ID)

    self.assertTrue(isinstance(step.children[0], publish.ExportRepoPublisher))
    self.assertEquals(len(step.children), 1)
    self.assertEquals(step.children[0].children[0].association_filters, 'foo')
    self.assertEquals(step.children[0].children[1].association_filters, 'foo')
def test_init_with_date_and_iso(self, mock_export_utils, mock_repo_qs, mock_wd):
    mock_export_utils.return_value = 'foo'
    config = PluginCallConfiguration(None, {})
    repo_group = mock.Mock(repo_ids=['foo', 'bar'], working_dir=self.working_dir)
    foo = model.Repository(repo_id='foo', display_name='foo', description='description',
                           notes={'_repo-type': 'rpm-repo'}, content_unit_counts={'rpm': 1})
    mock_repo_qs.return_value = [foo]

    step = publish.ExportRepoGroupPublisher(repo_group, self.publisher.get_conduit(),
                                            config, EXPORT_DISTRIBUTOR_ID)

    self.assertTrue(isinstance(step.children[0], publish.ExportRepoPublisher))
    self.assertTrue(isinstance(step.children[1], publish.CreateIsoStep))
    self.assertTrue(isinstance(step.children[2], publish.AtomicDirectoryPublishStep))
    self.assertEquals(len(step.children), 3)
    self.assertEquals(step.children[0].children[0].association_filters, 'foo')
    self.assertEquals(step.children[0].children[1].association_filters, 'foo')
def test_build_reports(self):
    """
    Tests that the conduit correctly inserts the count values into the report.
    """
    model.Repository(repo_id='repo-1').save()

    # Created - 10
    for i in range(0, 10):
        unit_key = {'key-1': 'unit_%d' % i}
        unit = self.conduit.init_unit(TYPE_1_DEF.id, unit_key, {}, '/foo/bar')
        self.conduit.save_unit(unit)

    # Removed - 1
    doomed = self.conduit.get_units()[0]
    self.conduit.remove_unit(doomed)

    # Updated - 1
    update_me = self.conduit.init_unit(TYPE_1_DEF.id, {'key-1': 'unit_5'}, {}, '/foo/bar')
    self.conduit.save_unit(update_me)

    # Test
    success_report = self.conduit.build_success_report('summary', 'details')
    failure_report = self.conduit.build_failure_report('summary', 'details')
    cancel_report = self.conduit.build_cancel_report('summary', 'details')

    # Verify
    self.assertEqual(success_report.success_flag, True)
    self.assertEqual(success_report.canceled_flag, False)
    self.assertEqual(failure_report.success_flag, False)
    self.assertEqual(failure_report.canceled_flag, False)
    self.assertEqual(cancel_report.success_flag, False)
    self.assertEqual(cancel_report.canceled_flag, True)

    for r in (success_report, failure_report, cancel_report):
        self.assertTrue(isinstance(r, SyncReport))
        self.assertEqual(10, r.added_count)
        self.assertEqual(1, r.removed_count)
        self.assertEqual(1, r.updated_count)
        self.assertEqual('summary', r.summary)
        self.assertEqual('details', r.details)
def test_model_superclass(self):
    """
    Ensure that the Repository model is a subclass of Mongoengine's Document class.
    """
    sample_model = model.Repository()
    self.assertTrue(isinstance(sample_model, Document))
def test_finalize(self, mock_repo_controller):
    repo = Repository('repo1')
    repo.repo_obj = model.Repository(repo_id=repo.id)
    step = publish_step.SaveUnitsStep('foo_type', repo=repo)

    step.finalize()

    mock_repo_controller.rebuild_content_unit_counts.assert_called_once_with(repo.repo_obj)
def _generate_repo(self, repo_id):
    repo_model = model.Repository(repo_id=repo_id, display_name=repo_id)
    repo_model.save()
    return repo_model
def setUp(self):
    super(TestUnitsFromCriteria, self).setUp()
    self.manager = association_manager.RepoUnitAssociationManager()
    self.repo = me_model.Repository(repo_id='repo1')
def setUp(self):
    super(RemoveRepoDuplicateNevra, self).setUp()
    # repo_a is based on the test repo defined in TestPurgeBase
    self.repo_a = platform_model.Repository(repo_id=self.repo.id)
    self.repo_a.save()

    # repo_b is a control repo, that should be untouched by purge functions
    self.repo_b = platform_model.Repository(repo_id='b')
    self.repo_b.save()

    # create units
    unit_key_base = {
        'epoch': '0',
        'version': '0',
        'release': '23',
        'arch': 'noarch',
        'checksumtype': 'sha256',
        '_last_updated': 0,
    }

    units = []
    self.duplicate_unit_ids = set()
    for unit_type in self.UNIT_TYPES:
        unit_key_dupe = unit_key_base.copy()
        unit_key_uniq = unit_key_base.copy()

        # account for slightly different unit key field on drpm
        if unit_type is models.DRPM:
            unit_key_dupe['filename'] = 'dupe'
            unit_key_uniq['filename'] = 'uniq'
        else:
            unit_key_dupe['name'] = 'dupe'
            unit_key_uniq['name'] = 'uniq'

        # create units with duplicate nevra for this type
        # after purging, only one of the three should remain
        for i in range(3):
            unit_dupe = unit_type(**unit_key_dupe)
            # use the unit's python id to guarantee a unique "checksum"
            unit_dupe.checksum = str(id(unit_dupe))
            unit_dupe.save()
            units.append(unit_dupe)
            if i != 0:
                # after the first unit, stash the "extra" duplicates to make it easier
                # to modify the unit association updated timestamps for predictable sorting
                self.duplicate_unit_ids.add(unit_dupe.id)

        # use the incrementing unit count to make the uniq unit's nevra unique
        unit_key_uniq['version'] = str(len(units))

        # create a unit with unique nevra
        unit_uniq = unit_type(**unit_key_uniq)
        unit_uniq.checksum = str(hash(unit_uniq))
        unit_uniq.save()
        units.append(unit_uniq)

    # associate each unit with each repo
    for repo in self.repo_a, self.repo_b:
        for i, unit in enumerate(units):
            repo_controller.associate_single_unit(repo, unit)

    # Sanity check: 3 dupe units and 1 uniq unit for n unit types, for each repo
    expected_rcu_count = 4 * len(self.UNIT_TYPES)
    for repo_id in self.repo_a.repo_id, self.repo_b.repo_id:
        self.assertEqual(
            platform_model.RepositoryContentUnit.objects.filter(repo_id=repo_id).count(),
            expected_rcu_count)

    # To ensure the purge mechanism behavior is predictable for testing,
    # go through the duplicate unit IDs and set their updated time to be in the past,
    # since unit associations were all just created at the same time.
    # The older associations are the ones that should be purged.
    earlier_timestamp = dateutils.now_utc_timestamp() - 3600
    formatted_timestamp = dateutils.format_iso8601_utc_timestamp(earlier_timestamp)
    platform_model.RepositoryContentUnit.objects.filter(unit_id__in=self.duplicate_unit_ids)\
        .update(set__updated=formatted_timestamp)