def tearDown(self):
    """Run the base web-service teardown, purge test collections, and reset mock plugins."""
    base.PulpWebserviceTests.tearDown(self)
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove()
    mock_plugins.reset()
def tearDown(self):
    """Purge consumer/repo/bind collections (safe writes) and reset mock plugins."""
    super(ConsumerTest, self).tearDown()
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove(safe=True)
    mock_plugins.reset()
def clean(self):
    """Reset mock plugins and purge the repo/distributor collections."""
    super(RepoPublishConduitTests, self).clean()
    mock_plugins.reset()
    for model_cls in (Repo, RepoDistributor):
        model_cls.get_collection().remove()
def clean(self):
    """Drop the repository model and purge importer/distributor and task state."""
    super(RepoManagerTests, self).clean()
    model.Repository.drop_collection()
    for model_cls in (RepoImporter, RepoDistributor):
        model_cls.get_collection().remove()
    TaskStatus.objects().delete()
def clean(self):
    """Purge repo-related collections and delete all task statuses."""
    super(RepoManagerTests, self).clean()
    for model_cls in (Repo, RepoImporter, RepoDistributor):
        model_cls.get_collection().remove()
    dispatch.TaskStatus.objects().delete()
def test_delete_with_plugins(self): """ Tests that deleting a repo that has importers and distributors configured deletes them as well. """ # Setup self.manager.create_repo('doomed') importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() importer_manager.set_importer('doomed', 'mock-importer', {}) distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True, distributor_id='dist-1') distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True, distributor_id='dist-2') self.assertEqual(1, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'})))) self.assertEqual(2, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'})))) # Test self.manager.delete_repo('doomed') # Verify self.assertEqual(0, len(list(Repo.get_collection().find()))) self.assertEqual(0, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'})))) self.assertEqual(0, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'})))) self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count) self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count) repo_working_dir = common_utils.repository_working_dir('doomed', mkdir=False) self.assertTrue(not os.path.exists(repo_working_dir))
def clean(self):
    """Reset the mock distributor and purge the repo/distributor collections."""
    super(RepoDistributorManagerTests, self).clean()
    mock_plugins.MOCK_DISTRIBUTOR.reset_mock()
    for model_cls in (Repo, RepoDistributor):
        model_cls.get_collection().remove()
def clean(self):
    """Purge binding/repo collections and the unit test database."""
    for model_cls in (Bind, Repo, RepoDistributor, RepoImporter, RepoContentUnit):
        model_cls.get_collection().remove()
    unit_db.clean()
def test_import(self):
    """Publish a repo with the nodes distributor, then sync it back in with the importer."""
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    repo = Repository(self.REPO_ID)
    # All protocols are aliased to the same local test directory.
    cfg = {
        'protocol':'file',
        'http':{'alias':self.alias},
        'https':{'alias':self.alias},
        'file':{'alias':self.alias},
    }
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    # Wipe the local state so the subsequent import starts from scratch.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = 'file://' + publisher.manifest_path()
    cfg = dict(manifest_url=manifest_url, strategy=constants.MIRROR_STRATEGY)
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    importer.sync_repo(repo, conduit, cfg)
    # Verify -- every published unit was imported back
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def tearDown(self):
    """Purge consumer/repo/bind collections and reset mock plugins."""
    super(BindManagerTests, self).tearDown()
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove()
    mock_plugins.reset()
def tearDown(self):
    """Run the itinerary teardown, purge test collections, and reset mock plugins."""
    PulpItineraryTests.tearDown(self)
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove()
    mock_plugins.reset()
def tearDown(self):
    """Purge test collections, clean the content database, and finalize plugins."""
    PulpRPMTests.tearDown(self)
    for model_cls in (Consumer, Repo, RepoContentUnit, RepoDistributor):
        model_cls.get_collection().remove()
    database.clean()
    plugins.finalize()
def tearDown(self):
    """Purge consumer/group/repo/bind collections and reset mock plugins."""
    # NOTE(review): super(self.__class__, self) causes infinite recursion if this
    # class is ever subclassed; an explicit class name would be safer -- confirm
    # no subclasses exist before changing.
    super(self.__class__, self).tearDown()
    Consumer.get_collection().remove()
    ConsumerGroup.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Bind.get_collection().remove()
    mock_plugins.reset()
def setUp(self):
    """Start from empty collections with the mock plugins installed."""
    base.PulpWebserviceTests.setUp(self)
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def tearDown(self):
    """Drop the repository model, purge related collections, and reset mock plugins."""
    super(BindManagerTests, self).tearDown()
    Consumer.get_collection().remove()
    model.Repository.drop_collection()
    for model_cls in (RepoDistributor, Bind, ConsumerHistoryEvent):
        model_cls.get_collection().remove()
    mock_plugins.reset()
def setUp(self):
    """Start from empty collections with the mock plugins installed."""
    super(BindManagerTests, self).setUp()
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def setUp(self):
    """Start from empty collections with mock plugins and the mock agent installed."""
    PulpItineraryTests.setUp(self)
    for model_cls in (Consumer, Repo, RepoDistributor, Bind):
        model_cls.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
    mock_agent.install()
def setUp(self):
    """Purge consumer/group/repo/bind collections and install mock plugins."""
    # NOTE(review): super(self.__class__, self) causes infinite recursion if this
    # class is ever subclassed; an explicit class name would be safer -- confirm
    # no subclasses exist before changing.
    super(self.__class__, self).setUp()
    Consumer.get_collection().remove()
    ConsumerGroup.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Bind.get_collection().remove()
    plugin_api._create_manager()
    mock_plugins.install()
def setUp(self):
    """Purge collections, register the test content types, and install mock plugins."""
    super(BaseProfilerConduitTests, self).setUp()
    for model_cls in (Consumer, RepoDistributor, Bind, RepoContentUnit, UnitProfile):
        model_cls.get_collection().remove()
    plugin_api._create_manager()
    typedb.update_database([self.TYPE_1_DEF, self.TYPE_2_DEF])
    mock_plugins.install()
def tearDown(self):
    """Purge collections, clean the type database, and reset the manager factory."""
    super(BaseProfilerConduitTests, self).tearDown()
    for model_cls in (Consumer, Repo, RepoDistributor, Bind, RepoContentUnit, UnitProfile):
        model_cls.get_collection().remove()
    typedb.clean()
    factory.reset()
def tearDown(self):
    """Remove the scratch filesystems and purge all node-related collections."""
    ServerTests.tearDown(self)
    for scratch_dir in (self.parentfs, self.childfs):
        shutil.rmtree(scratch_dir)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    model.Repository.drop_collection()
    for model_cls in (RepoDistributor, RepoImporter, RepoContentUnit):
        model_cls.get_collection().remove()
    unit_db.clean()
def test_last_missing_distributor(self):
    """
    Tests getting last publish for a distributor that doesn't exist
    """
    # Setup -- the repo has a distributor, but we query a different one
    distributor = RepoDistributor('repo-1', 'dist-1', 'type-1', None, True)
    RepoDistributor.get_collection().save(distributor)

    # Test
    self.assertRaises(
        MissingResource, self.publish_manager.last_publish, 'repo-1', 'random-dist')
def clean(self, units_only=False, plugins=False):
    """Purge unit data; optionally also purge repo models and distributor plugins."""
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if units_only:
        return
    for model_cls in (Bind, Repo, RepoDistributor, RepoImporter):
        model_cls.get_collection().remove()
    if plugins:
        plugin_api._MANAGER.distributors.plugins = {}
def tearDown(self):
    """Remove the scratch filesystems and purge all node-related collections."""
    WebTest.tearDown(self)
    for scratch_dir in (self.parentfs, self.childfs):
        shutil.rmtree(scratch_dir)
    for model_cls in (Consumer, Bind, Repo, RepoDistributor, RepoImporter, RepoContentUnit):
        model_cls.get_collection().remove()
    unit_db.clean()
def clean(self, just_units=False, purge_plugins=False):
    """Purge unit data; optionally also purge repo models and plugin registries."""
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if just_units:
        return
    for model_cls in (Bind, Repo, RepoDistributor, RepoImporter):
        model_cls.get_collection().remove()
    if purge_plugins:
        plugin_api._MANAGER.importers.plugins = {}
        plugin_api._MANAGER.distributors.plugins = {}
def test_last_publish_never_published(self):
    """
    Tests getting the last publish date for an unpublished repo.
    """
    # Setup
    distributor = RepoDistributor('repo-1', 'dist-1', 'type-1', None, True)
    RepoDistributor.get_collection().save(distributor)

    # Test -- must not raise for a distributor that never published
    last = self.publish_manager.last_publish('repo-1', 'dist-1')

    # Verify
    self.assertTrue(last is None)
def get_distributor_scratchpad(self, repo_id, distributor_id):
    """
    Return the contents of the distributor's scratchpad for the given repo.

    None is returned when there is no such distributor or when the
    scratchpad has never been set.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the distributor on the repo
    @type distributor_id: str

    @return: value set for the distributor's scratchpad
    @rtype: anything that can be saved in the database
    """
    collection = RepoDistributor.get_collection()
    query = {'repo_id': repo_id, 'id': distributor_id}
    repo_distributor = collection.find_one(query)
    if repo_distributor is None:
        return None
    return repo_distributor.get('scratchpad', None)
def set_distributor_scratchpad(self, repo_id, distributor_id, contents):
    """
    Write the scratchpad for the given repo's distributor to the database.

    Any previously saved value is replaced. If there is no distributor
    with the given ID on the repo, this call does nothing.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the distributor on the repo
    @type distributor_id: str
    @param contents: value to write to the scratchpad field
    @type contents: anything that can be saved in the database
    """
    collection = RepoDistributor.get_collection()
    query = {'repo_id': repo_id, 'id': distributor_id}
    repo_distributor = collection.find_one(query)
    if repo_distributor is None:
        # Unknown distributor: silently ignore, per the documented contract.
        return
    repo_distributor['scratchpad'] = contents
    collection.save(repo_distributor, safe=True)
def _do_publish(self, repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config):
    # Runs the distributor plugin's publish; on failure it records the publish
    # time and an error history entry, then re-raises as PulpExecutionException.
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']

    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        publish_report = distributor_instance.publish_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({'repo_id' : repo_id, 'id' : distributor_id})
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        _LOG.exception(_('Exception caught from plugin during publish for repo [%(r)s]' % {'r' : repo_id}))
        # Python 2 three-expression raise: wrap in PulpExecutionException while
        # preserving the original traceback for the caller.
        raise PulpExecutionException(), None, sys.exc_info()[2]
def last_publish(self, repo_id, distributor_id):
    """
    Return the timestamp of the last publish call, regardless of its
    success or failure. If the repo has never been published, None is
    returned.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the repo's distributor
    @type distributor_id: str

    @return: timestamp of the last publish
    @rtype: datetime or None

    @raise MissingResource: if there is no distributor identified by the
           given repo ID and distributor ID
    """
    # Validation
    collection = RepoDistributor.get_collection()
    repo_distributor = collection.find_one({'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(repo_id)

    # Convert the stored ISO8601 string into a datetime instance
    date_str = repo_distributor['last_publish']
    if date_str is None:
        return None
    return dateutils.parse_iso8601_datetime(date_str)
def test_import(self, *mocks):
    """Publish with the nodes distributor, then import with concurrency/bandwidth limits."""
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    # Wipe the local state so the import starts from scratch.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
        importer_constants.KEY_MAX_SPEED: max_bandwidth,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    # The nectar download config must have been built from the flattened settings.
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def migrate(*args, **kwargs):
    """
    Convert last_published iso8601 string to native date object.
    """
    field = 'last_publish'
    collection = RepoDistributor.get_collection()
    for distributor in collection.find():
        value = distributor[field]
        if not isinstance(value, basestring):
            # already migrated
            continue
        distributor[field] = parse_iso8601_datetime(value)
        collection.save(distributor, safe=True)
def test_import_cached_manifest_missing_units(self, *unused):
    """A cached manifest with no downloaded units must still import everything."""
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    configuration = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, configuration)
    # Wipe the local state so the import starts from scratch.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    # Seed the importer's working dir with a cached copy of the manifest only.
    publisher = dist.publisher(repo, configuration)
    manifest_path = publisher.manifest_path()
    manifest = Manifest(manifest_path)
    manifest.read()
    shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
    # Test
    importer = NodesHttpImporter()
    manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def test_update_auto_publish(self):
    """Updating a distributor's config with auto_publish=False persists the flag."""
    # Setup
    self.repo_manager.create_repo('test-repo')
    dist_config = {'key': 'value'}
    added = self.distributor_manager.add_distributor(
        'test-repo', 'mock-distributor', dist_config, True)

    # Test -- flip auto_publish off
    self.distributor_manager.update_distributor_config('test-repo', added['id'], {}, False)

    # Verify the stored document reflects the change
    stored = RepoDistributor.get_collection().find_one({'repo_id': 'test-repo'})
    self.assertFalse(stored['auto_publish'])
def test_last_publish(self): """ Tests retrieving the last publish instance. """ # Setup expected = datetime.datetime(year=2020, month=4, day=12, hour=0, minute=23) date_str = dateutils.format_iso8601_datetime(expected) dist = RepoDistributor('repo-1', 'dist-1', 'type-1', None, True) dist['last_publish'] = date_str RepoDistributor.get_collection().save(dist) # Test last = self.publish_manager.last_publish('repo-1', 'dist-1') # Verify self.assertEqual(expected, last)
def test_last_publish(self): """ Tests retrieving the last publish time in both the unpublish and previously published cases. """ # Test - Unpublished unpublished = self.conduit.last_publish() self.assertTrue(unpublished is None) # Setup - Previous publish last_publish = datetime.datetime.now() repo_dist = RepoDistributor.get_collection().find_one( {'repo_id': 'repo-1'}) repo_dist['last_publish'] = dateutils.format_iso8601_datetime( last_publish) RepoDistributor.get_collection().save(repo_dist, safe=True) # Test - Last publish found = self.conduit.last_publish() self.assertTrue(isinstance(found, datetime.datetime)) # check returned format self.assertEqual(repo_dist['last_publish'], dateutils.format_iso8601_datetime(found))
def test_bind_missing_distributor(self):
    """Binding must fail with InvalidValue when the distributor no longer exists."""
    # Setup -- populate, then delete every distributor
    self.populate()
    RepoDistributor.get_collection().remove({})

    # Test
    manager = factory.consumer_bind_manager()
    self.assertRaises(
        InvalidValue, manager.bind, self.CONSUMER_ID, self.REPO_ID,
        self.DISTRIBUTOR_ID, self.NOTIFY_AGENT, self.BINDING_CONFIG)

    # Verify no binding document was created
    bindings = list(Bind.get_collection().find({}))
    self.assertEqual(len(bindings), 0)
def test_last_publish(self): """ Tests retrieving the last publish time in both the unpublish and previously published cases. """ # Test - Unpublished unpublished = self.conduit.last_publish() self.assertTrue(unpublished is None) # Setup - Previous publish last_publish = datetime.datetime(2015, 4, 29, 20, 23, 56, 0) repo_dist = RepoDistributor.get_collection().find_one( {'repo_id': 'repo-1'}) repo_dist['last_publish'] = last_publish RepoDistributor.get_collection().save(repo_dist) # Test - Last publish found = self.conduit.last_publish() self.assertTrue(isinstance(found, datetime.datetime)) # check returned format self.assertEqual(found.tzinfo, dateutils.utc_tz()) self.assertEqual(repo_dist['last_publish'], found.replace(tzinfo=None))
def setUp(self):
    """Create parent/child scratch filesystems, purge collections, and register node plugins."""
    ServerTests.setUp(self)
    self.parentfs = self.tmpdir('parent-')
    self.childfs = self.tmpdir('child-')
    # Both publishing aliases point at the parent filesystem.
    self.alias = (self.parentfs, self.parentfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    plugin_api._create_manager()
    # Register the node importer/distributor/profiler plugins directly with
    # the plugin manager so tests can exercise them without entry points.
    imp_conf = dict(strategy=constants.MIRROR_STRATEGY)
    plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, imp_conf)
    plugin_api._MANAGER.distributors.add_plugin(constants.HTTP_DISTRIBUTOR, NodesHttpDistributor, {})
    plugin_api._MANAGER.distributors.add_plugin(FAKE_DISTRIBUTOR, FakeDistributor, FAKE_DISTRIBUTOR_CONFIG)
    plugin_api._MANAGER.profilers.add_plugin(constants.PROFILER_ID, NodeProfiler, {})
def test_add_distributor(self):
    """
    Tests adding a distributor to a new repo.
    """
    # Setup
    self.repo_manager.create_repo('test_me')
    config = {'key1': 'value1', 'key2': None}

    # Test
    added = self.distributor_manager.add_distributor('test_me', 'mock-distributor', config,
                                                     True, distributor_id='my_dist')

    # Verify -- None-valued config keys are stripped before storage
    expected_config = {'key1': 'value1'}

    # Database
    all_distributors = list(RepoDistributor.get_collection().find())
    self.assertEqual(1, len(all_distributors))
    self.assertEqual('my_dist', all_distributors[0]['id'])
    self.assertEqual('mock-distributor', all_distributors[0]['distributor_type_id'])
    self.assertEqual('test_me', all_distributors[0]['repo_id'])
    self.assertEqual(expected_config, all_distributors[0]['config'])
    self.assertTrue(all_distributors[0]['auto_publish'])

    # Returned Value
    self.assertEqual('my_dist', added['id'])
    self.assertEqual('mock-distributor', added['distributor_type_id'])
    self.assertEqual('test_me', added['repo_id'])
    self.assertEqual(expected_config, added['config'])
    self.assertTrue(added['auto_publish'])

    # Plugin - Validate Config
    self.assertEqual(1, mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_count)
    call_repo = mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_args[0][0]
    call_config = mock_plugins.MOCK_DISTRIBUTOR.validate_config.call_args[0][1]
    self.assertTrue(isinstance(call_repo, Repository))
    self.assertEqual('test_me', call_repo.id)
    self.assertTrue(isinstance(call_config, PluginCallConfiguration))
    self.assertTrue(call_config.plugin_config is not None)
    self.assertEqual(call_config.repo_plugin_config, expected_config)

    # Plugin - Distributor Added
    self.assertEqual(1, mock_plugins.MOCK_DISTRIBUTOR.distributor_added.call_count)
    # Bug fix: inspect distributor_added's call args here (previously this
    # re-inspected validate_config.call_args, so distributor_added's arguments
    # were never actually verified).
    call_repo = mock_plugins.MOCK_DISTRIBUTOR.distributor_added.call_args[0][0]
    call_config = mock_plugins.MOCK_DISTRIBUTOR.distributor_added.call_args[0][1]
    self.assertTrue(isinstance(call_repo, Repository))
    self.assertTrue(isinstance(call_config, PluginCallConfiguration))
def list_publish_schedules(self, repo_id, distributor_id):
    """
    List the scheduled publishes for the given distributor on the repo.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the distributor on the repo
    @type distributor_id: str

    @raise MissingResource: if there is no such distributor on the repo
    """
    collection = RepoDistributor.get_collection()
    distributor = collection.find_one({
        'repo_id': repo_id,
        'id': distributor_id
    })
    if distributor is None:
        raise MissingResource(repo=repo_id, distributor=distributor_id)
    return distributor['scheduled_publishes']
def test_add_distributor_no_explicit_id(self):
    """
    Tests the ID generation when one is not specified for a distributor.
    """
    # Setup
    self.repo_manager.create_repo('happy-repo')

    # Test -- no distributor_id supplied, so one must be generated
    added = self.distributor_manager.add_distributor('happy-repo', 'mock-distributor', {}, True)

    # Verify the generated ID can be used to look the distributor up
    query = {'repo_id': 'happy-repo', 'id': added['id']}
    distributor = RepoDistributor.get_collection().find_one(query)
    self.assertTrue(distributor is not None)
def remove_distributor(repo_id, distributor_id):
    """
    Removes a distributor from a repository.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the distributor to delete
    @type distributor_id: str

    @raise MissingResource: if repo_id doesn't correspond to a valid repo
    @raise MissingResource: if there is no distributor with the given ID
    """
    repo_coll = Repo.get_collection()
    distributor_coll = RepoDistributor.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repository=repo_id)

    repo_distributor = distributor_coll.find_one({
        'repo_id': repo_id,
        'id': distributor_id
    })
    if repo_distributor is None:
        raise MissingResource(distributor=distributor_id)

    # remove schedules
    RepoPublishScheduleManager().delete_by_distributor_id(
        repo_id, repo_distributor['id'])

    # Call the distributor's cleanup method
    distributor_type_id = repo_distributor['distributor_type_id']
    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
        distributor_type_id)

    call_config = PluginCallConfiguration(plugin_config, repo_distributor['config'])

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.distributor_working_dir(
        distributor_type_id, repo_id)

    # Let the plugin clean up its own state before the DB record goes away.
    distributor_instance.distributor_removed(transfer_repo, call_config)

    # Update the database to reflect the removal
    distributor_coll.remove({'_id': repo_distributor['_id']}, safe=True)
def test_create_and_configure_repo(self): """ Tests the successful creation of a repo + plugins. """ # Setup repo_id = 'full' display_name = 'Full' description = 'Full Test' notes = {'n': 'n'} importer_type_id = 'mock-importer' importer_repo_plugin_config = {'i': 'i'} distributors = [ dict(distributor_type_id='mock-distributor', distributor_config={'d': 'd'}, auto_publish=True, distributor_id='dist1'), dict(distributor_type_id='mock-distributor', distributor_config={'d': 'd'}, auto_publish=True, distributor_id='dist2') ] # Test created = self.manager.create_and_configure_repo( repo_id, display_name, description, notes, importer_type_id, importer_repo_plugin_config, distributors) # Verify self.assertEqual(created['id'], repo_id) repo = Repo.get_collection().find_one({'id': repo_id}) self.assertEqual(repo['id'], repo_id) self.assertEqual(repo['display_name'], display_name) self.assertEqual(repo['description'], description) self.assertEqual(repo['notes'], notes) importer = RepoImporter.get_collection().find_one({'repo_id': repo_id}) self.assertEqual(importer['importer_type_id'], importer_type_id) self.assertEqual(importer['config'], importer_repo_plugin_config) for d in distributors: distributor = RepoDistributor.get_collection().find_one( {'id': d['distributor_id']}) self.assertEqual(distributor['repo_id'], repo_id) self.assertEqual(distributor['distributor_type_id'], d['distributor_type_id']) self.assertEqual(distributor['auto_publish'], d['auto_publish']) self.assertEqual(distributor['config'], d['distributor_config'])
def find_by_repo_list(repo_id_list):
    """
    Returns serialized versions of all distributors for given repos. Any
    IDs that do not refer to valid repos are ignored and will not raise
    an error.

    @param repo_id_list: list of repo IDs whose distributors should be fetched
    @type repo_id_list: list of str

    @return: list of serialized distributors
    @rtype: list of dict
    """
    spec = {'repo_id': {'$in': repo_id_list}}
    # Exclude the scratchpad: it is plugin-private and can be large.
    projection = {'scratchpad': 0}
    return list(RepoDistributor.get_collection().find(spec, projection))
def test_remove_distributor(self):
    """
    Tests removing an existing distributor from a repository.
    """
    # Setup
    self.repo_manager.create_repo('dist-repo')
    self.distributor_manager.add_distributor(
        'dist-repo', 'mock-distributor', {}, True, distributor_id='doomed')

    # Test
    self.distributor_manager.remove_distributor('dist-repo', 'doomed')

    # Verify the distributor document was deleted
    query = {'repo_id': 'dist-repo', 'id': 'doomed'}
    self.assertTrue(RepoDistributor.get_collection().find_one(query) is None)
def publish(repo_id, dist_id, publish_config_override=None):
    """
    Uses the given distributor to publish the repository.

    The publish operation is executed synchronously in the caller's thread and will block until
    it is completed. The caller must take the necessary steps to address the fact that a publish
    call may be time intensive.

    :param repo_id: identifies the repo being published
    :type  repo_id: str
    :param dist_id: identifies the repo's distributor to publish
    :type  dist_id: str
    :param publish_config_override: optional config values to use for this publish call only
    :type  publish_config_override: dict, None

    :return: report of the details of the publish
    :rtype:  pulp.server.db.model.repository.RepoPublishResult

    :raises pulp_exceptions.MissingResource: if distributor/repo pair does not exist
    """
    distributor_coll = RepoDistributor.get_collection()
    # Raises MissingResource when the repo itself is unknown.
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    repo_distributor = distributor_coll.find_one({
        'repo_id': repo_id,
        'id': dist_id
    })
    if repo_distributor is None:
        raise pulp_exceptions.MissingResource(repository=repo_id, distributor=dist_id)

    dist_inst, dist_conf = _get_distributor_instance_and_config(
        repo_id, dist_id)

    # Assemble the data needed for the publish
    conduit = RepoPublishConduit(repo_id, dist_id)
    call_config = PluginCallConfiguration(dist_conf, repo_distributor['config'],
                                          publish_config_override)
    transfer_repo = repo_obj.to_transfer_repo()
    transfer_repo.working_dir = common_utils.get_working_directory()

    # Fire events describing the publish state
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_publish_started(repo_id, dist_id)
    result = _do_publish(repo_obj, dist_id, dist_inst, transfer_repo, conduit, call_config)
    fire_manager.fire_repo_publish_finished(result)
    return result
def remove_publish_schedule(self, repo_id, distributor_id, schedule_id):
    """
    Remove a scheduled publish from the given distributor on the repo.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the distributor on the repo
    @type distributor_id: str
    @param schedule_id: identifies the scheduled publish to remove
    @type schedule_id: str

    @raise MissingResource: if there is no such distributor on the repo
    """
    collection = RepoDistributor.get_collection()
    distributor = collection.find_one({'repo_id': repo_id, 'id': distributor_id})
    if distributor is None:
        raise MissingResource(repo=repo_id, distributor=distributor_id)
    if schedule_id not in distributor['scheduled_publishes']:
        # Removal is idempotent: an unknown schedule is silently ignored.
        return
    collection.update({'_id': distributor['_id']},
                      {'$pull': {'scheduled_publishes': schedule_id}},
                      safe=True)
def auto_distributors(repo_id):
    """
    Returns all distributors for the given repo that are configured automatic publishing.

    :param repo_id: limit distributors to this repo
    :type repo_id: str
    :return: list of distributors for specified repo that have autopublish enabled
    :rtype: list of dicts
    """
    query = {'repo_id': repo_id, 'auto_publish': True}
    return list(RepoDistributor.get_collection().find(query))
def test_publish_with_error(self): """ Tests a publish when the plugin raises an error. """ # Setup mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = Exception() self.repo_manager.create_repo('gonna-bail') self.distributor_manager.add_distributor('gonna-bail', 'mock-distributor', {}, False, distributor_id='bad-dist') self.assertRaises(Exception, self.publish_manager.publish, 'gonna-bail', 'bad-dist') # Verify repo_distributor = RepoDistributor.get_collection().find_one({ 'repo_id': 'gonna-bail', 'id': 'bad-dist' }) self.assertTrue(repo_distributor is not None) self.assertTrue(assert_last_sync_time( repo_distributor['last_publish'])) entries = list(RepoPublishResult.get_collection().find( {'repo_id': 'gonna-bail'})) self.assertEqual(1, len(entries)) self.assertEqual('gonna-bail', entries[0]['repo_id']) self.assertEqual('bad-dist', entries[0]['distributor_id']) self.assertEqual('mock-distributor', entries[0]['distributor_type_id']) self.assertTrue(entries[0]['started'] is not None) self.assertTrue(entries[0]['completed'] is not None) self.assertEqual(RepoPublishResult.RESULT_ERROR, entries[0]['result']) self.assertTrue(entries[0]['summary'] is None) self.assertTrue(entries[0]['details'] is None) self.assertTrue(entries[0]['error_message'] is not None) self.assertTrue(entries[0]['exception'] is not None) self.assertTrue(entries[0]['traceback'] is not None) # Cleanup mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def publish_history(self, repo_id, distributor_id, limit=None):
    """
    Return publish history entries for the given repo, sorted from most
    recent to oldest. An empty list is returned when there are no entries.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the distributor to retrieve history for
    @type distributor_id: str
    @param limit: maximum number of results to return
    @type limit: int

    @return: list of publish history result instances
    @rtype: list of L{pulp.server.db.model.repository.RepoPublishResult}

    @raise MissingResource: if repo_id does not reference a valid repo
    """
    # Validation
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repo_id)
    if RepoDistributor.get_collection().find_one(
            {'repo_id': repo_id, 'id': distributor_id}) is None:
        raise MissingResource(distributor_id)

    if limit is None:
        limit = 10  # default here for each of REST API calls into here

    # Retrieve the entries, newest first
    cursor = RepoPublishResult.get_collection().find(
        {'repo_id': repo_id, 'distributor_id': distributor_id})
    cursor.limit(limit)
    cursor.sort('completed', pymongo.DESCENDING)
    return list(cursor)
def get_distributors(self, repo_id):
    """
    Returns all distributors on the given repo.

    @param repo_id: identifies the repo
    @type repo_id: str
    @return: list of key-value pairs describing the distributors; empty list
             if there are none for the given repo
    @rtype: list, None
    @raise MissingResource: if the given repo doesn't exist
    """
    # Raises MissingResource for an unknown repo.
    model.Repository.objects.get_repo_or_missing_resource(repo_id)
    cursor = RepoDistributor.get_collection().find({'repo_id': repo_id})
    return list(cursor)
def _do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit,
                call_config):
    """
    Run a single distributor publish and, on failure, record the outcome.

    The plugin call is wrapped with register_sigterm_handler so a SIGTERM
    triggers the distributor's cancel_publish_repo(). A publish report whose
    success_flag is False is treated the same as a raised exception.

    On any failure this updates the distributor's last_publish timestamp,
    stores an error RepoPublishResult, logs, and re-raises.

    NOTE(review): only the error path updates last_publish and the history
    collection here — confirm success-path bookkeeping happens elsewhere.

    :param repo: repo document; only repo['id'] is read here
    :param distributor_id: identifies the distributor being published
    :param distributor_instance: plugin object providing publish_repo and
        cancel_publish_repo
    :param transfer_repo: plugin-facing repo representation passed through
        to publish_repo
    :param conduit: conduit handed to the plugin
    :param call_config: configuration handed to the plugin
    :raises PulpCodedException: PLP0034 when the report flags failure
    """
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']

    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(
            distributor_instance.publish_repo, distributor_instance.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
        # A report object that explicitly reports failure is promoted to an
        # exception so it flows through the same error-recording path below
        if publish_report is not None and hasattr(publish_report, 'success_flag') \
                and not publish_report.success_flag:
            raise PulpCodedException(error_code=error_codes.PLP0034, repository_id=repo_id,
                                     distributor_id=distributor_id)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        _logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' % {'r': repo_id}))
        # Re-raise so the caller sees the original failure
        raise
def test_remove_distributor(self, mock_delete_schedules, mock_repo_qs):
    """
    Removing an existing distributor deletes its record and its schedules.
    """
    self.distributor_manager.add_distributor('dist-repo', 'mock-distributor', {}, True,
                                             distributor_id='doomed')

    self.distributor_manager.remove_distributor('dist-repo', 'doomed')

    # The distributor document should be gone from the database
    remaining = RepoDistributor.get_collection().find_one(
        {'repo_id': 'dist-repo', 'id': 'doomed'})
    self.assertTrue(remaining is None)
    # Any publish schedules tied to the distributor must be cleaned up too
    mock_delete_schedules.assert_called_once_with('dist-repo', 'doomed')
def test_add_distributor_multiple_distributors(self):
    """
    A repo can hold several distributors at once, each under its own id.
    """
    self.repo_manager.create_repo('test_me')
    self.distributor_manager.add_distributor('test_me', 'mock-distributor', {}, True,
                                             distributor_id='dist_1')

    # Test: attach a second distributor of a different type
    self.distributor_manager.add_distributor('test_me', 'mock-distributor-2', {}, True,
                                             distributor_id='dist_2')

    # Verify: both records exist under their requested ids
    all_distributors = list(RepoDistributor.get_collection().find())
    self.assertEqual(2, len(all_distributors))
    stored_ids = set(d['id'] for d in all_distributors)
    self.assertTrue('dist_1' in stored_ids)
    self.assertTrue('dist_2' in stored_ids)
def test_bind_missing_distributor(self):
    """
    Binding against a repo whose distributor records have been removed must
    fail with MissingResource and must not create a bind document.
    """
    # Setup: populate, then wipe every distributor record
    self.populate()
    RepoDistributor.get_collection().remove({})

    # Test: the bind is rejected with MissingResource
    manager = factory.consumer_bind_manager()
    self.assertRaises(
        MissingResource, manager.bind, self.CONSUMER_ID, self.REPO_ID,
        self.DISTRIBUTOR_ID, self.NOTIFY_AGENT, self.BINDING_CONFIG)

    # Verify: no bind documents were written
    binds = list(Bind.get_collection().find({}))
    self.assertEqual(len(binds), 0)
def get_repo_distributors_by_relative_url(self, rel_url, repo_id=None):
    """
    Get the config repo_id and config objects matching a given relative URL

    :param rel_url: a relative URL for a distributor config
    :type rel_url: str
    :param repo_id: the id of a repo to skip, If not specified all repositories
                    will be included in the search
    :type repo_id: str
    :return: a cursor to iterate over the list of repository configurations
             whose configuration conflicts with rel_url
    :rtype: pymongo.cursor.Cursor
    """
    # Build a list of all the sub urls that could conflict with the provided
    # URL, e.g. "a/b/c" -> ["/a", "/a/b", "/a/b/c"]; working_url is left with
    # a trailing slash ("/a/b/c/") for the prefix-regex match below
    current_url_pieces = [x for x in rel_url.split('/') if x]
    matching_url_list = []
    working_url = '/'
    for piece in current_url_pieces:
        working_url += piece
        matching_url_list.append(working_url)
        working_url += '/'

    # Calculate the base field of the URL, this is used for tests where the
    # repo id is used as a substitute for the relative url: /repo-id/
    # BUGFIX: an empty or "/" rel_url yields no path pieces and previously
    # raised IndexError here; fall back to the raw value (matches no repo id)
    repo_id_url = current_url_pieces[0] if current_url_pieces else rel_url

    # Search for all the sub urls as well as any url that would fall within
    # the specified url.
    # NOTE(review): rel_url is interpolated into the $regex unescaped —
    # regex metacharacters in a relative URL would change the match; confirm
    # upstream validation guarantees plain path characters.
    spec = {'$or': [{'config.relative_url': {'$regex': '^' + working_url + '.*'}},
                    {'config.relative_url': {'$in': matching_url_list}},
                    {'$and': [{'config.relative_url': {'$exists': False}},
                              {'repo_id': repo_id_url}]}
                    ]}

    if repo_id is not None:
        # Exclude the repo being edited from its own conflict check
        spec = {'$and': [{'repo_id': {'$ne': repo_id}}, spec]}

    projection = {'repo_id': 1, 'config': 1}
    return RepoDistributor.get_collection().find(spec, projection)
def last_publish(self):
    """
    Returns the timestamp of the last time this repo was published, regardless of
    the success or failure of the publish. If the repo was never published,
    this call returns None.

    :return: timestamp instance describing the last publish
    :rtype: datetime.datetime or None

    :raises DistributorConduitException: if any errors occur, including the
        distributor record being missing (the MissingResource raised inside
        the try block is caught and re-wrapped below)
    """
    try:
        collection = RepoDistributor.get_collection()
        distributor = collection.find_one({
            'repo_id': self.repo_id,
            'id': self.distributor_id
        })
        if distributor is None:
            # Raised inside the try on purpose: the broad except converts it
            # into the conduit-level exception type callers expect
            raise pulp_exceptions.MissingResource(self.repo_id)
        return distributor['last_publish']
    except Exception, e:
        _logger.exception('Error getting last publish time for repo [%s]' % self.repo_id)
        # Python 2 three-expression raise: wrap the error in a
        # DistributorConduitException while preserving the original traceback
        raise DistributorConduitException(e), None, sys.exc_info()[2]
def _migrate_rpm_repositories():
    """
    Add an export_distributor (default config, auto_publish off) to every rpm
    repo that does not already have one, and remove the legacy iso_distributor
    (since renamed to export_distributor) from those repos.
    """
    collection = RepoDistributor.get_collection()
    for repo_distributor in collection.find():
        # Only yum (rpm) repos are touched by this migration
        if repo_distributor['distributor_type_id'] != ids.TYPE_ID_DISTRIBUTOR_YUM:
            continue

        repo_id = repo_distributor['repo_id']

        # Create an export distributor with the default config unless the
        # repo already carries one
        existing_export = collection.find_one(
            {'repo_id': repo_id, 'distributor_type_id': ids.TYPE_ID_DISTRIBUTOR_EXPORT})
        if existing_export is None:
            export_distributor = RepoDistributor(
                repo_id=repo_id,
                id=ids.EXPORT_DISTRIBUTOR_ID,
                distributor_type_id=ids.TYPE_ID_DISTRIBUTOR_EXPORT,
                config=EXPORT_DISTRIBUTOR_CONFIG,
                auto_publish=False)
            collection.save(export_distributor, safe=True)

        # Drop the obsolete iso_distributor if the repo still has one
        iso_distributor = collection.find_one(
            {'repo_id': repo_id, 'distributor_type_id': 'iso_distributor'})
        if iso_distributor is not None:
            collection.remove(iso_distributor, safe=True)
def last_publish(self, repo_id, distributor_id):
    """
    Returns the timestamp of the last publish call, regardless of its success
    or failure. If the repo has never been published, returns None.

    @param repo_id: identifies the repo
    @type repo_id: str
    @param distributor_id: identifies the repo's distributor
    @type distributor_id: str
    @return: timestamp of the last publish
    @rtype: datetime or None
    @raise MissingResource: if there is no distributor identified by the
           given repo ID and distributor ID
    """
    # Validation: the (repo, distributor) pair must exist
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(repo_id)

    # The timestamp is stored as an ISO8601 string; convert to a datetime
    # instance before returning (None means "never published")
    date_str = repo_distributor['last_publish']
    if date_str is None:
        return None
    return dateutils.parse_iso8601_datetime(date_str)