def clean(self):
    """Remove all repo-related documents and task statuses created by a test."""
    super(RepoManagerTests, self).clean()
    # Each call clears an independent collection; order is irrelevant.
    dispatch.TaskStatus.objects().delete()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    Repo.get_collection().remove()
def clean(self):
    """Drop every collection this test suite may have populated."""
    unit_db.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Repo.get_collection().remove()
    Bind.get_collection().remove()
def test_delete_with_plugins(self):
    """
    Tests that deleting a repo that has importers and distributors configured
    deletes them as well.
    """
    # Setup
    self.manager.create_repo('doomed')

    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()

    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-2')

    # Sanity check: one importer and two distributors attached before delete.
    # NOTE: dict spacing normalized ({'repo_id': ...}) to match the identical
    # sibling test elsewhere in the suite.
    self.assertEqual(1, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(2, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'}))))

    # Test
    self.manager.delete_repo('doomed')

    # Verify: the repo and its attachments are gone from the database
    self.assertEqual(0, len(list(Repo.get_collection().find())))
    self.assertEqual(0, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(0, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'}))))

    # Plugin cleanup hooks fired once per attached plugin instance
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)
    self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count)

    # The repo's working directory must no longer exist
    repo_working_dir = common_utils.repository_working_dir('doomed', mkdir=False)
    self.assertTrue(not os.path.exists(repo_working_dir))
def clean(self):
    """Reset all database state touched by these tests."""
    super(RepoManagerTests, self).clean()
    TaskStatus.objects().delete()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    model.Repository.drop_collection()
def setUp(self): super(Migration0004Tests, self).setUp() # Special way to import modules that start with a number self.migration = _import_all_the_way( 'pulp_rpm.plugins.migrations.0004_pkg_group_category_repoid') factory.initialize() types_db.update_database([TYPE_DEF_GROUP, TYPE_DEF_CATEGORY]) # Create the repositories necessary for the tests self.source_repo_id = 'source-repo' # where units were copied from with the bad code self.dest_repo_id = 'dest-repo' # where bad units were copied to source_repo = Repo(self.source_repo_id, '') Repo.get_collection().insert(source_repo, safe=True) dest_repo = Repo(self.dest_repo_id, '') Repo.get_collection().insert(dest_repo, safe=True) source_importer = RepoImporter(self.source_repo_id, 'yum_importer', 'yum_importer', {}) RepoImporter.get_collection().insert(source_importer, safe=True) dest_importer = RepoImporter(self.dest_repo_id, 'yum_importer', 'yum_importer', {}) RepoImporter.get_collection().insert(dest_importer, safe=True)
def test_delete_with_plugins(self):
    """Deleting a repo must also remove its importers and distributors."""
    # -- setup: one importer and two distributors on the doomed repo ---------
    self.manager.create_repo('doomed')
    importer_manager = manager_factory.repo_importer_manager()
    distributor_manager = manager_factory.repo_distributor_manager()
    importer_manager.set_importer('doomed', 'mock-importer', {})
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-1')
    distributor_manager.add_distributor('doomed', 'mock-distributor', {}, True,
                                        distributor_id='dist-2')
    self.assertEqual(1, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(2, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'}))))

    # -- test -----------------------------------------------------------------
    self.manager.delete_repo('doomed')

    # -- verify: database is empty and the plugin hooks fired ------------------
    self.assertEqual(0, len(list(Repo.get_collection().find())))
    self.assertEqual(0, len(list(RepoImporter.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(0, len(list(RepoDistributor.get_collection().find({'repo_id': 'doomed'}))))
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)
    self.assertEqual(2, mock_plugins.MOCK_DISTRIBUTOR.distributor_removed.call_count)
    working_dir = common_utils.repository_working_dir('doomed', mkdir=False)
    self.assertFalse(os.path.exists(working_dir))
def test_set_importer_with_existing(self, mock_repo_qs):
    """
    Tests setting a different importer on a repo that already had one.
    """
    class MockImporter2(Importer):
        @classmethod
        def metadata(cls):
            return {'types': ['mock_types_2']}

        def validate_config(self, repo_data, importer_config):
            return True

    # Register the second importer type with the plugin framework before use
    mock_plugins.IMPORTER_MAPPINGS['mock-importer-2'] = MockImporter2()
    plugin_api._MANAGER.importers.add_plugin('mock-importer-2', MockImporter2, {})

    self.importer_manager.set_importer('change_me', 'mock-importer', {})

    # Test
    self.importer_manager.set_importer('change_me', 'mock-importer-2', {})

    # Verify: only the replacement remains and the old importer was told to clean up
    all_importers = list(RepoImporter.get_collection().find())
    self.assertEqual(1, len(all_importers))
    self.assertEqual(all_importers[0]['id'], 'mock-importer-2')
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)

    RepoImporter.get_collection().remove()
def tearDown(self):
    """Undo all mocks and database state created by TestDoSync tests."""
    super(TestDoSync, self).tearDown()
    mock_plugins.reset()
    manager_factory.reset()
    RepoSyncResult.get_collection().remove()
    RepoImporter.get_collection().remove()
    Repo.get_collection().remove()
    MockRepoPublishManager.reset()
def clean(self):
    """Clear sync-related collections and reset the publish-manager mock."""
    super(RepoSyncManagerTests, self).clean()
    RepoSyncResult.get_collection().remove()
    RepoImporter.get_collection().remove()
    Repo.get_collection().remove()
    # Reset the state of the mock's tracker variables
    MockRepoPublishManager.reset()
def tearDown(self):
    """Drop every piece of sample data the migration tests inserted."""
    super(Migration0004Tests, self).tearDown()
    types_db.clean()
    Repo.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
def set_importer(repo_id, importer_type_id, repo_plugin_config):
    """
    Configures an importer to be used for the given repository.

    Keep in mind this method is written assuming single importer for a repo.
    The domain model technically supports multiple importers, but this call is
    what enforces the single importer behavior.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :param importer_type_id: identifies the type of importer being added; must correspond
                             to an importer loaded at server startup
    :type  importer_type_id: str
    :param repo_plugin_config: configuration values for the importer; may be None
    :type  repo_plugin_config: dict
    :raise MissingResource: if repo_id does not represent a valid repo
    :raise InvalidImporterConfiguration: if the importer cannot be initialized for the given repo
    """
    # Validate before touching anything so a bad config leaves no side effects
    RepoImporterManager.validate_importer_config(repo_id, importer_type_id, repo_plugin_config)
    importer_coll = RepoImporter.get_collection()

    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

    # Convention is that a value of None means unset. Remove any keys that
    # are explicitly set to None so the plugin will default them.
    if repo_plugin_config is not None:
        clean_config = dict([(k, v) for k, v in repo_plugin_config.items() if v is not None])
    else:
        clean_config = None

    # Let the importer plugin verify the configuration
    call_config = PluginCallConfiguration(plugin_config, clean_config)
    transfer_repo = repo_obj.to_transfer_repo()

    # Remove old importer if one exists; this is what enforces single-importer behavior
    try:
        RepoImporterManager.remove_importer(repo_id)
    except MissingResource:
        pass  # it didn't exist, so no harm done

    # Let the importer plugin initialize the repository
    try:
        importer_instance.importer_added(transfer_repo, call_config)
    except Exception:
        _logger.exception(
            'Error initializing importer [%s] for repo [%s]' % (importer_type_id, repo_id))
        # Python 2 three-arg raise: preserve the plugin's original traceback
        raise PulpExecutionException(), None, sys.exc_info()[2]

    # Database Update
    importer_id = importer_type_id  # use the importer name as its repo ID
    importer = RepoImporter(repo_id, importer_id, importer_type_id, clean_config)
    importer_coll.save(importer)

    return importer
def clean(self):
    """Reset collections and the mock importer's dependency-resolution stub."""
    super(DependencyManagerTests, self).clean()
    database.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    Repo.get_collection().remove()
    mock_plugins.MOCK_IMPORTER.resolve_dependencies.return_value = None
def clean(self, units_only=False, plugins=False):
    """Purge unit data; unless units_only, also repo data and optionally plugins."""
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if not units_only:
        Bind.get_collection().remove()
        Repo.get_collection().remove()
        RepoDistributor.get_collection().remove()
        RepoImporter.get_collection().remove()
        if plugins:
            plugin_api._MANAGER.distributors.plugins = {}
def tearDown(self):
    """Remove scratch directories and purge every collection the test used."""
    ServerTests.tearDown(self)
    shutil.rmtree(self.childfs)
    shutil.rmtree(self.parentfs)
    unit_db.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoDistributor.get_collection().remove()
    model.Repository.drop_collection()
    Bind.get_collection().remove()
    Consumer.get_collection().remove()
def tearDown(self):
    """Remove scratch directories and purge every collection the test used."""
    WebTest.tearDown(self)
    shutil.rmtree(self.childfs)
    shutil.rmtree(self.parentfs)
    unit_db.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Repo.get_collection().remove()
    Bind.get_collection().remove()
    Consumer.get_collection().remove()
def clean(self, just_units=False, purge_plugins=False):
    """Purge unit data; unless just_units, also repo data and optionally plugins."""
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if not just_units:
        Bind.get_collection().remove()
        Repo.get_collection().remove()
        RepoDistributor.get_collection().remove()
        RepoImporter.get_collection().remove()
        if purge_plugins:
            plugin_api._MANAGER.importers.plugins = {}
            plugin_api._MANAGER.distributors.plugins = {}
def test_sync_bad_database(self):
    """
    Simulates a corrupt database: the repo believes it has an importer but
    the importer-repo relationship document is missing.
    """
    self.repo_manager.create_repo('good-repo')
    self.importer_manager.set_importer('good-repo', 'mock-importer', None)

    # Wipe the importer records out from under the repo
    RepoImporter.get_collection().remove()

    self.assertRaises(repo_sync_manager.PulpExecutionException,
                      self.sync_manager.sync, 'good-repo')
def test_remove_importer(self, mock_delete_schedules, mock_repo_qs):
    """Happy path: removing an importer deletes its document and its schedules."""
    self.importer_manager.set_importer('whiterun', 'mock-importer', {})
    query = {'repo_id': 'whiterun', 'id': 'mock-importer'}
    self.assertTrue(RepoImporter.get_collection().find_one(query) is not None)

    self.importer_manager.remove_importer('whiterun')

    self.assertTrue(RepoImporter.get_collection().find_one(query) is None)
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)
    mock_delete_schedules.assert_called_once_with('whiterun', 'mock-importer')
def test_set_importer_added_raises_error(self, mock_repo_qs):
    """
    Tests simulating an error coming out of the importer's validate config method.
    """
    mock_plugins.MOCK_IMPORTER.importer_added.side_effect = Exception()
    config = {'hobbit': 'frodo'}
    try:
        self.importer_manager.set_importer('repo-1', 'mock-importer', config)
        self.fail('Exception expected for importer plugin exception')
    except exceptions.PulpExecutionException:
        # Expected: the manager wraps plugin failures in PulpExecutionException
        pass
    finally:
        # Always restore the shared mock so later tests are not poisoned
        mock_plugins.MOCK_IMPORTER.importer_added.side_effect = None
        RepoImporter.get_collection().remove()
def delete(repo_id): """ Delete a repository and inform other affected collections. :param repo_id: id of the repository to delete. :type repo_id: str :raise pulp_exceptions.PulpExecutionException: if any part of the process fails; the exception will contain information on which sections failed :return: A TaskResult object with the details of any errors or spawned tasks :rtype: pulp.server.async.tasks.TaskResult """ # With so much going on during a delete, it's possible that a few things could go wrong while # others are successful. We track lesser errors that shouldn't abort the entire process until # the end and then raise an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user will have to look at # the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _logger.exception('Error received removing importer [%s] from repo [%s]' % ( repo_importer['importer_type_id'], repo_id)) error_tuples.append(e)
def remove_importer(repo_id):
    """
    Removes an importer from a repository.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :raise MissingResource: if the given repo does not exist
    :raise MissingResource: if the given repo does not have an importer
    """
    importer_coll = RepoImporter.get_collection()

    # Validation
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    repo_importer = importer_coll.find_one({"repo_id": repo_id})
    if repo_importer is None:
        raise MissingResource(repo_id)

    # remove schedules
    RepoSyncScheduleManager().delete_by_importer_id(repo_id, repo_importer["id"])

    # Call the importer's cleanup method
    importer_type_id = repo_importer["importer_type_id"]
    importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)
    call_config = PluginCallConfiguration(plugin_config, repo_importer["config"])
    transfer_repo = repo_obj.to_transfer_repo()
    importer_instance.importer_removed(transfer_repo, call_config)

    # Update the database to reflect the removal; done last so a failure in the
    # plugin callback above leaves the importer record intact
    importer_coll.remove({"repo_id": repo_id})
def test_update_importer_config(self):
    """Updating an importer config merges new keys and drops None-valued keys."""
    # setup
    self.repo_manager.create_repo('winterhold')
    starting = {'key1': 'initial1', 'key2': 'initial2', 'key3': 'initial3'}
    self.importer_manager.set_importer('winterhold', 'mock-importer', starting)

    # test: key1 is overwritten, key2 removed via None, key3 untouched
    delta = {'key1': 'updated1', 'key2': None}
    updated = self.importer_manager.update_importer_config('winterhold', delta)

    expected = {'key1': 'updated1', 'key3': 'initial3'}

    # database state
    importer = RepoImporter.get_collection().find_one(
        {'repo_id': 'winterhold', 'id': 'mock-importer'})
    self.assertEqual(importer['config'], expected)

    # return value
    self.assertEqual(updated['config'], expected)

    # plugin saw both the initial set and the update
    self.assertEqual(2, mock_plugins.MOCK_IMPORTER.validate_config.call_count)
    self.assertEqual(
        expected,
        mock_plugins.MOCK_IMPORTER.validate_config.call_args[0][1].repo_plugin_config)
def test_sync_with_sync_config_override(self):
    """A per-call override config must be passed through to the importer."""
    importer_config = {'thor': 'thor'}
    self.repo_manager.create_repo('repo-1')
    self.importer_manager.set_importer('repo-1', 'mock-importer', importer_config)

    override = {'clint': 'hawkeye'}
    self.sync_manager.sync('repo-1', sync_config_override=override)

    repo = Repo.get_collection().find_one({'id': 'repo-1'})
    repo_importer = RepoImporter.get_collection().find_one(
        {'repo_id': 'repo-1', 'id': 'mock-importer'})

    # database bookkeeping
    self.assertTrue(repo_importer['last_sync'] is not None)
    self.assertTrue(assert_last_sync_time(repo_importer['last_sync']))

    # importer invocation: repo, conduit, and the three config layers
    sync_args = mock_plugins.MOCK_IMPORTER.sync_repo.call_args[0]
    self.assertEqual(repo['id'], sync_args[0].id)
    self.assertTrue(sync_args[1] is not None)
    self.assertEqual({}, sync_args[2].plugin_config)
    self.assertEqual(importer_config, sync_args[2].repo_plugin_config)
    self.assertEqual(override, sync_args[2].override_config)
def delete(repo_id): """ Delete a repository and inform other affected collections. :param repo_id: id of the repository to delete. :type repo_id: str :raise pulp_exceptions.PulpExecutionException: if any part of the process fails; the exception will contain information on which sections failed :return: A TaskResult object with the details of any errors or spawned tasks :rtype: pulp.server.async.tasks.TaskResult """ # With so much going on during a delete, it's possible that a few things could go wrong while # others are successful. We track lesser errors that shouldn't abort the entire process until # the end and then raise an exception describing the incompleteness of the delete. The exception # arguments are captured as the second element in the tuple, but the user will have to look at # the server logs for more information. error_tuples = [] # tuple of failed step and exception arguments importer_manager = manager_factory.repo_importer_manager() distributor_manager = manager_factory.repo_distributor_manager() # Inform the importer importer_coll = RepoImporter.get_collection() repo_importer = importer_coll.find_one({'repo_id': repo_id}) if repo_importer is not None: try: importer_manager.remove_importer(repo_id) except Exception, e: _logger.exception( 'Error received removing importer [%s] from repo [%s]' % (repo_importer['importer_type_id'], repo_id)) error_tuples.append(e)
def sync(repo_id, sync_config_override=None):
    """
    Performs a synchronize operation on the given repository and triggers publishs for
    distributors with autopublish enabled. The given repo must have an importer configured.
    This method is intentionally limited to synchronizing a single repo. Performing multiple
    repository syncs concurrently will require a more global view of the server and must be
    handled outside the scope of this class.

    :param repo_id: identifies the repo to sync
    :type  repo_id: str
    :param sync_config_override: optional config containing values to use for this sync only
    :type  sync_config_override: dict

    :return: TaskResult containing sync results and a list of spawned tasks
    :rtype:  pulp.server.async.tasks.TaskResult

    :raise pulp_exceptions.MissingResource: if specified repo does not exist, or it does not
                                            have an importer and associated plugin
    :raise pulp_exceptions.PulpExecutionException: if the task fails.
    """
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    transfer_repo = repo_obj.to_transfer_repo()

    importer_collection = RepoImporter.get_collection()
    repo_importer = importer_collection.find_one({'repo_id': repo_obj.repo_id})
    if repo_importer is None:
        raise pulp_exceptions.MissingResource(repository=repo_id)

    try:
        importer, imp_config = plugin_api.get_importer_by_id(repo_importer['importer_type_id'])
    except plugin_exceptions.PluginNotFound:
        # A missing plugin is reported the same as a missing repo/importer
        raise pulp_exceptions.MissingResource(repository=repo_id)

    # Layer the per-call override on top of the stored importer config
    call_config = PluginCallConfiguration(imp_config, repo_importer['config'],
                                          sync_config_override)
    transfer_repo.working_dir = common_utils.get_working_directory()
    conduit = RepoSyncConduit(repo_id, repo_importer['id'])
    sync_result_collection = RepoSyncResult.get_collection()

    # Fire an events around the call
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_sync_started(repo_id)

    # Perform the sync
    sync_start_timestamp = _now_timestamp()
    sync_result = None

    try:
        # Replace the Importer's sync_repo() method with our register_sigterm_handler
        # decorator, which will set up cancel_sync_repo() as the target for the signal handler
        sync_repo = register_sigterm_handler(importer.sync_repo, importer.cancel_sync_repo)
        sync_report = sync_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        # Record the failure before re-raising so sync history reflects the error
        sync_end_timestamp = _now_timestamp()
        sync_result = RepoSyncResult.error_result(
            repo_obj.repo_id, repo_importer['id'], repo_importer['importer_type_id'],
            sync_start_timestamp, sync_end_timestamp, e, sys.exc_info()[2])
        raise
def find_by_repo_list(repo_id_list):
    """
    Returns serialized versions of all importers for given repos. Any
    IDs that do not refer to valid repos are ignored and will not
    raise an error.

    @param repo_id_list: list of repo IDs whose importers should be fetched
    @type  repo_id_list: list of str

    @return: list of serialized importers
    @rtype:  list of dict
    """
    # Exclude the scratchpad from the projection; it is internal plugin state
    spec = {'repo_id': {'$in': repo_id_list}}
    projection = {'scratchpad': 0}
    importers = list(RepoImporter.get_collection().find(spec, projection))

    # Process any scheduled syncs and get schedule details using schedule id
    for importer in importers:
        scheduled_sync_ids = importer.get('scheduled_syncs', None)
        if scheduled_sync_ids is not None:
            scheduled_sync_details = list(
                ScheduledCall.get_collection().find({"id": {"$in": scheduled_sync_ids}}))
            importer['scheduled_syncs'] = [s["schedule"] for s in scheduled_sync_details]

    return importers
def test_update_importer_config(self):
    """Successful update: delta keys replace, None keys delete, others persist."""
    # Setup
    self.repo_manager.create_repo("winterhold")
    initial = {"key1": "initial1", "key2": "initial2", "key3": "initial3"}
    self.importer_manager.set_importer("winterhold", "mock-importer", initial)

    # Test
    updated = self.importer_manager.update_importer_config(
        "winterhold", {"key1": "updated1", "key2": None})

    # Verify against both the database and the returned document
    expected = {"key1": "updated1", "key3": "initial3"}
    importer = RepoImporter.get_collection().find_one(
        {"repo_id": "winterhold", "id": "mock-importer"})
    self.assertEqual(importer["config"], expected)
    self.assertEqual(updated["config"], expected)

    # validate_config ran once for set_importer and once for the update;
    # call_args holds the arguments from the last call
    self.assertEqual(2, mock_plugins.MOCK_IMPORTER.validate_config.call_count)
    self.assertEqual(
        expected,
        mock_plugins.MOCK_IMPORTER.validate_config.call_args[0][1].repo_plugin_config)
def repositories_with_yum_importers():
    """Return id/scratchpad documents for every repo with a yum importer attached."""
    importer_docs = RepoImporter.get_collection().find(
        {"importer_type_id": _TYPE_YUM_IMPORTER}, fields=["repo_id"])
    ids = [doc["repo_id"] for doc in importer_docs]
    cursor = Repo.get_collection().find({"id": {"$in": ids}},
                                        fields=["id", "scratchpad"])
    return list(cursor)
def test_set_importer_with_existing(self):
    """Setting a second importer replaces the first and fires its removal hook."""
    # Setup: register an alternate importer type with the plugin manager
    class MockImporter2(Importer):
        @classmethod
        def metadata(cls):
            return {"types": ["mock_types_2"]}

        def validate_config(self, repo_data, importer_config, related_repos):
            return True

    mock_plugins.IMPORTER_MAPPINGS["mock-importer-2"] = MockImporter2()
    plugin_api._MANAGER.importers.add_plugin("mock-importer-2", MockImporter2, {})

    self.repo_manager.create_repo("change_me")
    self.importer_manager.set_importer("change_me", "mock-importer", {})

    # Test: overwrite the original importer
    self.importer_manager.set_importer("change_me", "mock-importer-2", {})

    # Verify: exactly one importer remains, and it is the replacement
    remaining = list(RepoImporter.get_collection().find())
    self.assertEqual(1, len(remaining))
    self.assertEqual(remaining[0]["id"], "mock-importer-2")
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_removed.call_count)
def test_update_importer_config(self, m_serializer, mock_repo_qs):
    """Config update drops None-valued keys and persists the merged config."""
    initial = {'key1': 'initial1', 'key2': 'initial2', 'key3': 'initial3'}
    self.importer_manager.set_importer('winterhold', 'mock-importer', initial)

    self.importer_manager.update_importer_config(
        'winterhold', {'key1': 'updated1', 'key2': None})

    expected = {'key1': 'updated1', 'key3': 'initial3'}

    # The serializer received the merged config...
    serialized = m_serializer.mock_calls[0][1][0]['config']
    self.assertDictEqual(serialized, expected)

    # ...and so did the database.
    importer = RepoImporter.get_collection().find_one(
        {'repo_id': 'winterhold', 'id': 'mock-importer'})
    self.assertEqual(importer['config'], expected)

    # validate_config ran for the initial set and for the update;
    # call_args holds the arguments from the last call
    self.assertEqual(2, mock_plugins.MOCK_IMPORTER.validate_config.call_count)
    self.assertEqual(
        expected,
        mock_plugins.MOCK_IMPORTER.validate_config.call_args[0][1].repo_plugin_config)
def remove_importer(repo_id):
    """
    Removes an importer from a repository.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :raise MissingResource: if the given repo does not exist
    :raise MissingResource: if the given repo does not have an importer
    """
    importer_coll = RepoImporter.get_collection()

    # Validation
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    repo_importer = importer_coll.find_one({'repo_id': repo_id})
    if repo_importer is None:
        raise MissingResource(repo_id)

    # remove schedules
    RepoSyncScheduleManager().delete_by_importer_id(repo_id, repo_importer['id'])

    # Call the importer's cleanup method
    importer_type_id = repo_importer['importer_type_id']
    importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])
    transfer_repo = repo_obj.to_transfer_repo()
    importer_instance.importer_removed(transfer_repo, call_config)

    # Update the database to reflect the removal; done last so a failure in the
    # plugin callback above leaves the importer record intact
    importer_coll.remove({'repo_id': repo_id})
def _do_sync(self, repo, importer_instance, transfer_repo, conduit, call_config):
    """
    Once all of the preparation for a sync has taken place, this call
    will perform the sync, making the necessary database updates. It returns the
    sync result instance (already saved to the database). This call does not
    have any behavior based on the success/failure of the sync; it is up to the
    caller to raise an exception in the event of a failed sync if that behavior
    is desired.
    """
    importer_coll = RepoImporter.get_collection()
    sync_result_coll = RepoSyncResult.get_collection()
    repo_id = repo['id']

    # Perform the sync
    sync_start_timestamp = _now_timestamp()
    try:
        sync_report = importer_instance.sync_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        # I really wish python 2.4 supported except and finally together
        sync_end_timestamp = _now_timestamp()

        # Reload the importer in case the plugin edits the scratchpad
        repo_importer = importer_coll.find_one({'repo_id': repo_id})
        repo_importer['last_sync'] = sync_end_timestamp
        importer_coll.save(repo_importer, safe=True)

        # Add a sync history entry for this run
        result = RepoSyncResult.error_result(
            repo_id, repo_importer['id'], repo_importer['importer_type_id'],
            sync_start_timestamp, sync_end_timestamp, e, sys.exc_info()[2])
        sync_result_coll.save(result, safe=True)

        _LOG.exception(
            _('Exception caught from plugin during sync for repo [%(r)s]' % {'r': repo_id}))
        # Python 2 three-arg raise: preserve the plugin's original traceback
        raise PulpExecutionException(), None, sys.exc_info()[2]
def _do_sync(self, repo, importer_instance, transfer_repo, conduit, call_config):
    """
    Once all of the preparation for a sync has taken place, this call
    will perform the sync, making the necessary database updates. It returns the
    sync result instance (already saved to the database). This call does not
    have any behavior based on the success/failure of the sync; it is up to the
    caller to raise an exception in the event of a failed sync if that behavior
    is desired.
    """
    importer_coll = RepoImporter.get_collection()
    sync_result_coll = RepoSyncResult.get_collection()
    repo_id = repo['id']
    repo_importer = importer_coll.find_one({'repo_id': repo_id})

    # Perform the sync
    sync_start_timestamp = _now_timestamp()
    sync_end_timestamp = None
    result = None

    try:
        sync_report = importer_instance.sync_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        # Record the failure in sync history before wrapping and re-raising
        sync_end_timestamp = _now_timestamp()
        result = RepoSyncResult.error_result(
            repo_id, repo_importer['id'], repo_importer['importer_type_id'],
            sync_start_timestamp, sync_end_timestamp, e, sys.exc_info()[2])
        _LOG.exception(
            _('Exception caught from plugin during sync for repo [%(r)s]' % {'r': repo_id}))
        # Python 2 three-arg raise: preserve the plugin's original traceback
        raise PulpExecutionException(), None, sys.exc_info()[2]
def repositories_with_yum_importers():
    """Return id/scratchpad documents for every repo with a yum importer attached."""
    importer_docs = RepoImporter.get_collection().find(
        {'importer_type_id': _TYPE_YUM_IMPORTER}, fields=['repo_id'])
    ids = [doc['repo_id'] for doc in importer_docs]
    cursor = Repo.get_collection().find({'id': {'$in': ids}},
                                        fields=['id', 'scratchpad'])
    return list(cursor)
def _do_sync(repo, importer_instance, transfer_repo, conduit, call_config):
    """
    Once all of the preparation for a sync has taken place, this call
    will perform the sync, making the necessary database updates. It returns the
    sync result instance (already saved to the database). This call does not
    have any behavior based on the success/failure of the sync; it is up to the
    caller to raise an exception in the event of a failed sync if that behavior
    is desired.
    """
    importer_coll = RepoImporter.get_collection()
    sync_result_coll = RepoSyncResult.get_collection()
    repo_id = repo['id']
    repo_importer = importer_coll.find_one({'repo_id': repo_id})

    # Perform the sync
    sync_start_timestamp = _now_timestamp()
    sync_end_timestamp = None
    result = None

    try:
        # Replace the Importer's sync_repo() method with our register_sigterm_handler decorator,
        # which will set up cancel_sync_repo() as the target for the signal handler
        sync_repo = register_sigterm_handler(importer_instance.sync_repo,
                                             importer_instance.cancel_sync_repo)
        sync_report = sync_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        # Record the failure before re-raising so sync history reflects the error
        sync_end_timestamp = _now_timestamp()
        result = RepoSyncResult.error_result(
            repo_id, repo_importer['id'], repo_importer['importer_type_id'],
            sync_start_timestamp, sync_end_timestamp, e, sys.exc_info()[2])
        raise
def set_importer(repo_id, importer_type_id, repo_plugin_config):
    """
    Configures an importer to be used for the given repository.

    Keep in mind this method is written assuming single importer for a repo.
    The domain model technically supports multiple importers, but this call is
    what enforces the single importer behavior.

    :param repo_id: identifies the repo
    :type  repo_id: str
    :param importer_type_id: identifies the type of importer being added; must correspond
                             to an importer loaded at server startup
    :type  importer_type_id: str
    :param repo_plugin_config: configuration values for the importer; may be None
    :type  repo_plugin_config: dict
    :raise MissingResource: if repo_id does not represent a valid repo
    :raise InvalidImporterConfiguration: if the importer cannot be initialized for the given repo
    """
    repo_coll = Repo.get_collection()
    importer_coll = RepoImporter.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    if not plugin_api.is_valid_importer(importer_type_id):
        raise InvalidValue(['importer_type_id'])

    importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

    # Convention is that a value of None means unset. Remove any keys that
    # are explicitly set to None so the plugin will default them.
    if repo_plugin_config is not None:
        clean_config = dict([(k, v) for k, v in repo_plugin_config.items() if v is not None])
    else:
        clean_config = None

    # Let the importer plugin verify the configuration
    call_config = PluginCallConfiguration(plugin_config, clean_config)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(importer_type_id, repo_id)

    try:
        result = importer_instance.validate_config(transfer_repo, call_config)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result
    except Exception, e:
        logger.exception(
            'Exception received from importer [%s] while validating config' % importer_type_id)
        # Python 2 three-arg raise: preserve the plugin's original traceback
        raise PulpDataException(e.args), None, sys.exc_info()[2]
def delete_by_importer_id(repo_id, importer_id):
    """
    Delete all schedules for the specified repo and importer.

    :param repo_id: unique ID of the repository the importer is attached to
    :type  repo_id: basestring
    :param importer_id: unique ID for an importer
    :type  importer_id: basestring
    """
    # Schedules are keyed by the importer's resource tag, built from both IDs
    utils.delete_by_resource(RepoImporter.build_resource_tag(repo_id, importer_id))
def test_sync(self, mock_finished, mock_started):
    """
    Tests sync under normal conditions where everything is configured
    correctly. No importer config is specified.

    NOTE(review): mock_finished/mock_started are injected by patch decorators
    that are outside this view — presumably the sync lifecycle callbacks.
    """
    # Setup
    sync_config = {'bruce': 'hulk', 'tony': 'ironman'}
    self.repo_manager.create_repo('repo-1')
    self.importer_manager.set_importer('repo-1', 'mock-importer', sync_config)

    # Test
    self.sync_manager.sync('repo-1', sync_config_override=None)

    # Verify
    repo = Repo.get_collection().find_one({'id': 'repo-1'})
    repo_importer = RepoImporter.get_collection().find_one({'repo_id': 'repo-1', 'id': 'mock-importer'})

    # Database: the importer record's last_sync timestamp was updated
    self.assertTrue(repo_importer['last_sync'] is not None)
    self.assertTrue(assert_last_sync_time(repo_importer['last_sync']))

    # Call into the Importer: the sync call received the transfer repo, a
    # conduit, and a call config carrying the repo-scoped plugin config
    sync_args = mock_plugins.MOCK_IMPORTER.sync_repo.call_args[0]
    self.assertEqual(repo['id'], sync_args[0].id)
    self.assertTrue(sync_args[1] is not None)
    self.assertEqual({}, sync_args[2].plugin_config)
    self.assertEqual(sync_config, sync_args[2].repo_plugin_config)
    self.assertEqual({}, sync_args[2].override_config)

    # History Entry: exactly one successful sync result was recorded
    history = list(RepoSyncResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(history))
    self.assertEqual('repo-1', history[0]['repo_id'])
    self.assertEqual(RepoSyncResult.RESULT_SUCCESS, history[0]['result'])
    self.assertEqual('mock-importer', history[0]['importer_id'])
    self.assertEqual('mock-importer', history[0]['importer_type_id'])
    self.assertTrue(history[0]['started'] is not None)
    self.assertTrue(history[0]['completed'] is not None)

    self.assertEqual(10, history[0]['added_count'])
    self.assertEqual(1, history[0]['removed_count'])
    self.assertTrue(history[0]['summary'] is not None)
    self.assertTrue(history[0]['details'] is not None)

    self.assertTrue(history[0]['error_message'] is None)
    self.assertTrue(history[0]['exception'] is None)
    self.assertTrue(history[0]['traceback'] is None)

    # Lifecycle callbacks: each fired exactly once for this repo
    self.assertEqual(1, mock_started.call_count)
    self.assertEqual('repo-1', mock_started.call_args[0][0])
    self.assertEqual(1, mock_finished.call_count)
    self.assertEqual('repo-1', mock_finished.call_args[0][0]['repo_id'])
def setUp(self):
    """
    Create working directories, purge every repo-related collection, and
    register the Nodes importer/distributor/profiler plugins.
    """
    WebTest.setUp(self)
    self.parentfs = self.tmpdir('parent-')
    self.childfs = self.tmpdir('child-')
    self.alias = (self.parentfs, self.parentfs)

    # Start from a clean database: purge each collection these tests touch.
    for model_class in (Consumer, Bind, Repo, RepoDistributor,
                        RepoImporter, RepoContentUnit):
        model_class.get_collection().remove()
    unit_db.clean()

    # Register the plugins under test with a fresh plugin manager.
    self.define_plugins()
    plugin_api._create_manager()
    importer_config = dict(strategy=constants.MIRROR_STRATEGY)
    plugin_api._MANAGER.importers.add_plugin(
        constants.HTTP_IMPORTER, NodesHttpImporter, importer_config)
    plugin_api._MANAGER.distributors.add_plugin(
        constants.HTTP_DISTRIBUTOR, NodesHttpDistributor, {})
    plugin_api._MANAGER.distributors.add_plugin(
        FAKE_DISTRIBUTOR, FakeDistributor, FAKE_DISTRIBUTOR_CONFIG)
    plugin_api._MANAGER.profilers.add_plugin(
        constants.PROFILER_ID, NodeProfiler, {})
def test_sync_with_error(self):
    """
    Tests a sync when the plugin raises an error.
    """
    # Setup: make the mock importer's sync blow up with a recognizable type
    class FakePluginException(Exception):
        pass

    error_msg = 'Error test'
    mock_plugins.MOCK_IMPORTER.sync_repo.side_effect = FakePluginException(error_msg)

    self.repo_manager.create_repo('gonna-bail')
    self.importer_manager.set_importer('gonna-bail', 'mock-importer', {})

    # Test
    self.assertRaises(Exception, self.sync_manager.sync, 'gonna-bail')

    # Verify: the importer record still tracks the (failed) sync time
    repo_importer = RepoImporter.get_collection().find_one(
        {'repo_id': 'gonna-bail', 'id': 'mock-importer'})
    self.assertTrue(repo_importer['last_sync'] is not None)
    self.assertTrue(assert_last_sync_time(repo_importer['last_sync']))

    # Verify: exactly one history entry recording the error
    history = list(RepoSyncResult.get_collection().find({'repo_id': 'gonna-bail'}))
    self.assertEqual(1, len(history))

    entry = history[0]
    self.assertEqual('gonna-bail', entry['repo_id'])
    self.assertEqual(RepoSyncResult.RESULT_ERROR, entry['result'])
    self.assertEqual('mock-importer', entry['importer_id'])
    self.assertEqual('mock-importer', entry['importer_type_id'])
    self.assertTrue(entry['started'] is not None)
    self.assertTrue(entry['completed'] is not None)

    # An errored sync records no counts or report sections
    for unset_field in ('added_count', 'updated_count', 'removed_count',
                        'summary', 'details'):
        self.assertTrue(entry[unset_field] is None)

    self.assertEqual(error_msg, entry['error_message'])
    self.assertTrue('FakePluginException' in entry['exception'])
    self.assertTrue(entry['traceback'] is not None)

    # Cleanup: undo the side effect so other tests see a healthy importer
    mock_plugins.MOCK_IMPORTER.sync_repo.side_effect = None
def test_set_importer(self):
    """
    Tests setting an importer on a new repo (normal case).
    """
    # Setup
    self.repo_manager.create_repo('importer-test')
    importer_config = {'key1': 'value1', 'key2': None}

    # Test
    created = self.importer_manager.set_importer('importer-test', 'mock-importer',
                                                 importer_config)

    # Verify
    # None-valued keys are stripped before the config is stored
    expected_config = {'key1': 'value1'}

    # Database
    importer = RepoImporter.get_collection().find_one(
        {'repo_id': 'importer-test', 'id': 'mock-importer'})
    self.assertEqual('importer-test', importer['repo_id'])
    self.assertEqual('mock-importer', importer['id'])
    self.assertEqual('mock-importer', importer['importer_type_id'])
    self.assertEqual(expected_config, importer['config'])

    # Return Value
    self.assertEqual('importer-test', created['repo_id'])
    self.assertEqual('mock-importer', created['id'])
    self.assertEqual('mock-importer', created['importer_type_id'])
    self.assertEqual(expected_config, created['config'])

    # Plugin - Validate Config
    # (the section labels were previously swapped and the arg inspection
    # duplicated; each plugin hook is now checked exactly once)
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.validate_config.call_count)
    call_repo = mock_plugins.MOCK_IMPORTER.validate_config.call_args[0][0]
    call_config = mock_plugins.MOCK_IMPORTER.validate_config.call_args[0][1]
    self.assertTrue(isinstance(call_repo, Repository))
    self.assertEqual('importer-test', call_repo.id)
    self.assertTrue(isinstance(call_config, PluginCallConfiguration))
    self.assertTrue(call_config.plugin_config is not None)
    self.assertEqual(call_config.repo_plugin_config, expected_config)

    # Plugin - Importer Added
    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.importer_added.call_count)
def create(cls, repo_id, importer_id, sync_options, schedule, failure_threshold=None,
           enabled=True):
    """
    Create a new sync schedule for a given repository using the given importer.

    :param repo_id: unique ID for a repository
    :type repo_id: basestring
    :param importer_id: unique ID for an importer
    :type importer_id: basestring
    :param sync_options: dictionary that contains the key 'override_config',
                         whose value should be passed as the 'overrides'
                         parameter to the sync task. This wasn't originally
                         documented, so it isn't clear why overrides value
                         couldn't be passed directly.
    :type sync_options: dict
    :param schedule: ISO8601 schedule string describing when the sync runs
    :type schedule: basestring
    :param failure_threshold: failure threshold forwarded to the ScheduledCall
                              (presumably the number of failures allowed before
                              the schedule is disabled — confirm in ScheduledCall)
    :type failure_threshold: int or None
    :param enabled: whether the new schedule starts out enabled
    :type enabled: bool

    :return: new schedule instance
    :rtype: pulp.server.db.model.dispatch.ScheduledCall
    """
    # validate the input
    cls.validate_importer(repo_id, importer_id)
    utils.validate_keys(sync_options, _SYNC_OPTION_KEYS)
    utils.validate_initial_schedule_options(schedule, failure_threshold, enabled)

    task = sync_with_auto_publish.name
    args = [repo_id]
    kwargs = {'overrides': sync_options['override_config']}
    resource = RepoImporter.build_resource_tag(repo_id, importer_id)
    # Use a distinct name for the model object so the ISO8601 'schedule'
    # parameter isn't shadowed (the original rebound 'schedule' here).
    scheduled_call = ScheduledCall(schedule, task, args=args, kwargs=kwargs,
                                   resource=resource,
                                   failure_threshold=failure_threshold,
                                   enabled=enabled)
    scheduled_call.save()
    try:
        # Re-validate after saving: the importer may have been deleted
        # concurrently while the schedule was being persisted.
        cls.validate_importer(repo_id, importer_id)
    except exceptions.MissingResource:
        # back out of this whole thing, since the importer disappeared
        utils.delete(scheduled_call.id)
        raise

    return scheduled_call
def list_sync_schedules(self, repo_id):
    """
    List the sync schedules currently defined for the repo.

    :param repo_id: identifies the repo whose importer's schedules are listed
    :type repo_id: basestring
    :raise MissingResource: if the repo has no importer configured
    :return: the importer's 'scheduled_syncs' field (the schedule identifiers
             attached to the repo's importer)
    :rtype: list
    """
    collection = RepoImporter.get_collection()
    importer = collection.find_one({'repo_id': repo_id})
    if importer is None:
        raise MissingResource(importer=repo_id)
    return importer['scheduled_syncs']