def test_publish_history_ascending_sort(self):
    """
    Tests use the sort parameter to sort the results in ascending order by start time
    """
    # Setup: one repo with one distributor to accumulate history against.
    self.repo_manager.create_repo('test_sort')
    self.distributor_manager.add_distributor('test_sort', 'mock-distributor', {}, True,
                                             distributor_id='test_dist')

    # Insert five publish results with strictly increasing start/end times.
    date_template = '2013-06-01T12:00:0%sZ'
    for offset in range(0, 10, 2):
        record = RepoPublishResult.expected_result(
            'test_sort', 'test_dist', 'bar', date_template % str(offset),
            date_template % str(offset + 1), 'test-summary', 'test-details',
            RepoPublishResult.RESULT_SUCCESS)
        RepoPublishResult.get_collection().insert(record, safe=True)

    # Test that returned entries are in ascending order by time
    history = self.publish_manager.publish_history('test_sort', 'test_dist',
                                                   sort=constants.SORT_ASCENDING)
    self.assertEqual(5, len(history))
    for index in range(0, 4):
        earlier = dateutils.parse_iso8601_datetime(history[index]['started'])
        later = dateutils.parse_iso8601_datetime(history[index + 1]['started'])
        self.assertTrue(earlier < later)
def _do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config):
    """
    Run a single publish through the given distributor plugin.

    On failure this records the end time on the distributor, writes an error
    entry into the publish history collection, logs the exception, and
    re-raises as PulpExecutionException while preserving the original
    traceback (Python 2 three-expression raise).

    :param repo: repository being published; only repo['id'] is read here
    :param distributor_id: identifies the distributor on the repo
    :param distributor_instance: plugin providing publish_repo() and
        cancel_publish_repo()
    :param transfer_repo: plugin-facing representation of the repo
    :param conduit: conduit the plugin uses to interact with core pulp
    :param call_config: configuration passed through to the plugin
    """
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']

    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(
            distributor_instance.publish_repo, distributor_instance.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one(
            {'repo_id' : repo_id, 'id' : distributor_id})
        # Record when the (failed) publish finished on the distributor itself.
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' % {'r' : repo_id}))
        # Wrap in PulpExecutionException but keep the plugin's traceback.
        raise PulpExecutionException(), None, sys.exc_info()[2]
def test_publish_history_start_date(self):
    """
    Tests that publish_history honors the start_date filter.
    """
    # Setup
    self.repo_manager.create_repo('test_date')
    self.distributor_manager.add_distributor('test_date', 'mock-distributor', {}, True,
                                             distributor_id='test_dist')
    # Create three consecutive publish entries
    date_template = '2013-06-01T12:00:0%sZ'
    for offset in (0, 2, 4):
        record = RepoPublishResult.expected_result(
            'test_date', 'test_dist', 'bar', date_template % str(offset),
            date_template % str(offset + 1), 'test-summary', 'test-details',
            RepoPublishResult.RESULT_SUCCESS)
        RepoPublishResult.get_collection().insert(record, safe=True)

    # Without a filter, all three entries come back.
    self.assertEqual(3, len(self.publish_manager.publish_history('test_date', 'test_dist')))

    start_date = '2013-06-01T12:00:02Z'
    start_entries = self.publish_manager.publish_history('test_date', 'test_dist',
                                                         start_date=start_date)
    # Confirm the dates of the retrieved entries are later than or equal to the requested date
    self.assertEqual(2, len(start_entries))
    given_start = dateutils.parse_iso8601_datetime(start_date)
    for entry in start_entries:
        retrieved = dateutils.parse_iso8601_datetime(entry['started'])
        self.assertTrue(retrieved >= given_start)
def test_publish_history_descending_sort(self):
    """
    Tests use the sort parameter to sort the results in descending order by start time
    """
    # Setup
    self.repo_manager.create_repo('test_sort')
    self.distributor_manager.add_distributor('test_sort', 'mock-distributor', {}, True,
                                             distributor_id='test_dist')

    # Insert five publish results with strictly increasing start/end times.
    date_template = '2013-06-01T12:00:0%sZ'
    for offset in range(0, 10, 2):
        record = RepoPublishResult.expected_result(
            'test_sort', 'test_dist', 'bar', date_template % str(offset),
            date_template % str(offset + 1), 'test-summary', 'test-details',
            RepoPublishResult.RESULT_SUCCESS)
        RepoPublishResult.get_collection().insert(record, safe=True)

    # Test that returned entries are in descending order by time
    history = self.publish_manager.publish_history('test_sort', 'test_dist',
                                                   sort=constants.SORT_DESCENDING)
    self.assertEqual(5, len(history))
    for index in range(0, 4):
        newer = dateutils.parse_iso8601_datetime(history[index]['started'])
        older = dateutils.parse_iso8601_datetime(history[index + 1]['started'])
        self.assertTrue(newer > older)
def test_publish_history_end_date(self):
    """
    Tests that publish_history honors the end_date filter.
    """
    # Setup
    self.repo_manager.create_repo('test_date')
    self.distributor_manager.add_distributor('test_date', 'mock-distributor', {}, True,
                                             distributor_id='test_dist')
    # Create three consecutive publish entries
    date_template = '2013-06-01T12:00:0%sZ'
    for offset in (0, 2, 4):
        record = RepoPublishResult.expected_result(
            'test_date', 'test_dist', 'bar', date_template % str(offset),
            date_template % str(offset + 1), 'test-summary', 'test-details',
            RepoPublishResult.RESULT_SUCCESS)
        RepoPublishResult.get_collection().insert(record, safe=True)

    # Verify that all entries retrieved have dates prior to the given end date
    end_date = '2013-06-01T12:00:03Z'
    end_entries = self.publish_manager.publish_history('test_date', 'test_dist',
                                                       end_date=end_date)
    # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
    self.assertEqual(2, len(end_entries))
    given_end = dateutils.parse_iso8601_datetime(end_date)
    for entry in end_entries:
        retrieved = dateutils.parse_iso8601_datetime(entry['started'])
        self.assertTrue(retrieved <= given_end)
def _do_publish(self, repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config):
    """
    Run a single publish through the given distributor plugin.

    On failure this records the end time on the distributor, writes an error
    entry into the publish history collection, logs the exception, and
    re-raises as PulpExecutionException while preserving the original
    traceback (Python 2 three-expression raise).

    :param repo: repository being published; only repo['id'] is read here
    :param distributor_id: identifies the distributor on the repo
    :param distributor_instance: plugin object providing publish_repo()
    :param transfer_repo: plugin-facing representation of the repo
    :param conduit: conduit the plugin uses to interact with core pulp
    :param call_config: configuration passed through to the plugin
    """
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']

    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        publish_report = distributor_instance.publish_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({'repo_id' : repo_id, 'id' : distributor_id})
        # Record when the (failed) publish finished on the distributor itself.
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(repo_id, repo_distributor['id'],
                                                repo_distributor['distributor_type_id'],
                                                publish_start_timestamp, publish_end_timestamp,
                                                e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        _LOG.exception(_('Exception caught from plugin during publish for repo [%(r)s]' % {'r' : repo_id}))
        # Wrap in PulpExecutionException but keep the plugin's traceback.
        raise PulpExecutionException(), None, sys.exc_info()[2]
def add_result(repo_id, dist_id, offset):
    # Insert a successful publish-history record that started now (UTC) and
    # completed `offset` days later.
    begin = dateutils.now_utc_datetime_with_tzinfo()
    end = begin + datetime.timedelta(days=offset)
    record = RepoPublishResult.expected_result(
        repo_id, dist_id, 'bar', dateutils.format_iso8601_datetime(begin),
        dateutils.format_iso8601_datetime(end), 'test-summary', 'test-details',
        RepoPublishResult.RESULT_SUCCESS)
    RepoPublishResult.get_collection().insert(record, safe=True)
def _do_publish(repo_obj, dist_id, dist_inst, transfer_repo, conduit, call_config):
    """
    Publish the repository using the given distributor.

    :param repo_obj: repository object
    :type repo_obj: pulp.server.db.model.Repository
    :param dist_id: identifies the distributor
    :type dist_id: str
    :param dist_inst: instance of the distributor
    :type dist_inst: dict
    :param transfer_repo: dict representation of a repo for the plugins to use
    :type transfer_repo: pulp.plugins.model.Repository
    :param conduit: allows the plugin to interact with core pulp
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param call_config: allows the plugin to retrieve values
    :type call_config: pulp.plugins.config.PluginCallConfiguration
    :return: publish result containing information about the publish
    :rtype: pulp.server.db.model.repository.RepoPublishResult
    :raises pulp_exceptions.PulpCodedException: if the publish report's success flag is falsey
    """
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(dist_inst.publish_repo,
                                                dist_inst.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
        # A report with success_flag == False is a graceful failure; promote it
        # to a coded exception so it is handled by the error path below.
        if publish_report is not None and hasattr(publish_report, 'success_flag') \
                and not publish_report.success_flag:
            raise pulp_exceptions.PulpCodedException(
                error_code=error_codes.PLP0034, repository_id=repo_obj.repo_id,
                distributor_id=dist_id
            )
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one(
            {'repo_id': repo_obj.repo_id, 'id': dist_id})
        # NOTE(review): last_publish is intentionally NOT updated on failure
        # here; the save only persists any scratchpad changes made by the
        # plugin.
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_obj.repo_id, repo_distributor['id'],
            repo_distributor['distributor_type_id'], publish_start_timestamp,
            publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        _logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]'
              % {'r': repo_obj.repo_id}))
        raise
def add_result(repo_id, dist_id, offset):
    # Insert a successful publish-history record that started now (local tz)
    # and completed `offset` days later.
    begin = datetime.datetime.now(dateutils.local_tz())
    end = begin + datetime.timedelta(days=offset)
    record = RepoPublishResult.expected_result(
        repo_id, dist_id, 'bar', dateutils.format_iso8601_datetime(begin),
        dateutils.format_iso8601_datetime(end), 'test-summary', 'test-details',
        RepoPublishResult.RESULT_SUCCESS)
    RepoPublishResult.get_collection().insert(record, safe=True)
def __init__(self, repo, publish_conduit, config, distributor_type):
    """
    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str

    :ivar last_published: last time this distributor published the repo
    :ivar last_deleted: last time a unit was removed from this repository
    :ivar repo: repository being operated on
    :ivar predistributor: distributor object that is associated with this distributor.
        Its publish history affects the type of publish that is performed
    :ivar symlink_list: list of symlinks to rsync
    :ivar content_unit_file_list: list of content units to rsync
    :ivar symlink_src: path to directory containing all symlinks
    """
    super(Publisher, self).__init__("Repository publish", repo, publish_conduit, config,
                                    distributor_type=distributor_type)

    distributor = Distributor.objects.get_or_404(repo_id=self.repo.id,
                                                 distributor_id=publish_conduit.distributor_id)
    self.last_published = distributor["last_publish"]
    self.last_deleted = repo.last_unit_removed
    self.repo = repo
    self.predistributor = self._get_predistributor()

    # ISO-8601 string form of last_published, used in the Mongo query below.
    if self.last_published:
        string_date = dateutils.format_iso8601_datetime(self.last_published)
    else:
        string_date = None
    if self.predistributor:
        # All of the predistributor's publish results since our last publish.
        # NOTE(review): if string_date is None the $gte comparison is against
        # None — presumably intentional (matches everything); confirm.
        search_params = {'repo_id': repo.id,
                         'distributor_id': self.predistributor["id"],
                         'started': {"$gte": string_date}}
        self.predist_history = RepoPublishResult.get_collection().find(search_params)
    else:
        self.predist_history = []

    self.remote_path = self.get_remote_repo_path()

    # Fast-forward publishes only need history since the last publish; a full
    # publish uses no date filter at all.
    if self.is_fastforward():
        start_date = self.last_published
        end_date = None
        if self.predistributor:
            end_date = self.predistributor["last_publish"]
        date_filter = self.create_date_range_filter(start_date=start_date,
                                                    end_date=end_date)
    else:
        date_filter = None

    self.symlink_list = []
    self.content_unit_file_list = []
    self.symlink_src = os.path.join(self.get_working_dir(), '.relative/')

    # (sic) "_add_necesary_steps" — existing project spelling, do not rename.
    self._add_necesary_steps(date_filter=date_filter, config=config)
def publish_history(start_date, end_date, repo_id, distributor_id):
    """
    Returns a cursor containing the publish history entries for the given repo and distributor.

    :param start_date: if specified, no events prior to this date will be returned.
    :type start_date: iso8601 datetime string
    :param end_date: if specified, no events after this date will be returned.
    :type end_date: iso8601 datetime string
    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_id: identifies the distributor to retrieve history for
    :type distributor_id: str

    :return: object containing publish history results
    :rtype: pymongo.cursor.Cursor

    :raise pulp_exceptions.MissingResource: if repo/distributor pair is invalid
    """
    # Validate the repo exists (raises MissingResource otherwise).
    model.Repository.objects.get_repo_or_missing_resource(repo_id)
    dist = RepoDistributor.get_collection().find_one({'repo_id': repo_id, 'id': distributor_id})
    if dist is None:
        raise pulp_exceptions.MissingResource(distributor_id)

    search_params = {'repo_id': repo_id, 'distributor_id': distributor_id}
    # ISO-8601 strings sort lexicographically in chronological order, so a
    # plain string range query on 'started' is a date range query.
    date_range = {}
    if start_date:
        date_range['$gte'] = start_date
    if end_date:
        date_range['$lte'] = end_date
    if date_range:  # idiomatic truthiness check instead of len(...) > 0
        search_params['started'] = date_range
    return RepoPublishResult.get_collection().find(search_params)
def test_publish_with_error(self):
    """
    Tests a publish when the plugin raises an error.
    """
    # Setup
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = Exception()
    self.repo_manager.create_repo('gonna-bail')
    self.distributor_manager.add_distributor('gonna-bail', 'mock-distributor', {}, False,
                                             distributor_id='bad-dist')
    self.assertRaises(Exception, self.publish_manager.publish, 'gonna-bail', 'bad-dist')

    # Verify
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': 'gonna-bail', 'id': 'bad-dist'})
    self.assertTrue(repo_distributor is not None)
    self.assertTrue(assert_last_sync_time(repo_distributor['last_publish']))

    history = list(RepoPublishResult.get_collection().find({'repo_id': 'gonna-bail'}))
    self.assertEqual(1, len(history))
    entry = history[0]
    self.assertEqual('gonna-bail', entry['repo_id'])
    self.assertEqual('bad-dist', entry['distributor_id'])
    self.assertEqual('mock-distributor', entry['distributor_type_id'])
    self.assertTrue(entry['started'] is not None)
    self.assertTrue(entry['completed'] is not None)
    self.assertEqual(RepoPublishResult.RESULT_ERROR, entry['result'])
    self.assertTrue(entry['summary'] is None)
    self.assertTrue(entry['details'] is None)
    self.assertTrue(entry['error_message'] is not None)
    self.assertTrue(entry['exception'] is not None)
    self.assertTrue(entry['traceback'] is not None)

    # Cleanup
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def test_publish_failure_report(self):
    """
    Tests a publish call that indicates a graceful failure.
    """
    # Setup
    publish_config = {'foo': 'bar'}
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor', publish_config,
                                             False, distributor_id='dist-1')
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = PublishReport(
        False, 'Summary of the publish', 'Details of the publish')

    # Test
    self.publish_manager.publish('repo-1', 'dist-1', None)

    # Verify
    history = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(history))
    entry = history[0]
    self.assertEqual('repo-1', entry['repo_id'])
    self.assertEqual('dist-1', entry['distributor_id'])
    self.assertEqual('mock-distributor', entry['distributor_type_id'])
    self.assertTrue(entry['started'] is not None)
    self.assertTrue(entry['completed'] is not None)
    self.assertEqual(RepoPublishResult.RESULT_FAILED, entry['result'])
    self.assertTrue(entry['summary'] is not None)
    self.assertTrue(entry['details'] is not None)
    self.assertTrue(entry['error_message'] is None)
    self.assertTrue(entry['exception'] is None)
    self.assertTrue(entry['traceback'] is None)

    # Cleanup
    mock_plugins.reset()
def test_publish_failure_report(self):
    """
    Tests a publish call that indicates a graceful failure.
    """
    # Setup
    publish_config = {"foo": "bar"}
    self.repo_manager.create_repo("repo-1")
    self.distributor_manager.add_distributor("repo-1", "mock-distributor", publish_config,
                                             False, distributor_id="dist-1")
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = PublishReport(
        False, "Summary of the publish", "Details of the publish")

    # Test
    self.publish_manager.publish("repo-1", "dist-1", None)

    # Verify: a single FAILED history entry with no error/exception fields set.
    history = list(RepoPublishResult.get_collection().find({"repo_id": "repo-1"}))
    self.assertEqual(1, len(history))
    entry = history[0]
    self.assertEqual("repo-1", entry["repo_id"])
    self.assertEqual("dist-1", entry["distributor_id"])
    self.assertEqual("mock-distributor", entry["distributor_type_id"])
    self.assertTrue(entry["started"] is not None)
    self.assertTrue(entry["completed"] is not None)
    self.assertEqual(RepoPublishResult.RESULT_FAILED, entry["result"])
    self.assertTrue(entry["summary"] is not None)
    self.assertTrue(entry["details"] is not None)
    self.assertTrue(entry["error_message"] is None)
    self.assertTrue(entry["exception"] is None)
    self.assertTrue(entry["traceback"] is None)

    # Cleanup
    mock_plugins.reset()
def test_publish(self, mock_finished, mock_started):
    """
    Tests publish under normal conditions when everything is configured correctly.
    """
    # Setup
    publish_config = {'foo': 'bar'}
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor', publish_config,
                                             False, distributor_id='dist-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor-2', publish_config,
                                             False, distributor_id='dist-2')

    # Test
    distributor, config = self.publish_manager._get_distributor_instance_and_config(
        'repo-1', 'dist-1')
    self.publish_manager.publish('repo-1', 'dist-1', distributor, config, None)

    # Verify

    # Database
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': 'repo-1', 'id': 'dist-1'})
    self.assertTrue(repo_distributor['last_publish'] is not None)
    self.assertTrue(assert_last_sync_time(repo_distributor['last_publish']))

    # History
    history = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(history))
    entry = history[0]
    self.assertEqual('repo-1', entry['repo_id'])
    self.assertEqual('dist-1', entry['distributor_id'])
    self.assertEqual('mock-distributor', entry['distributor_type_id'])
    self.assertTrue(entry['started'] is not None)
    self.assertTrue(entry['completed'] is not None)
    self.assertEqual(RepoPublishResult.RESULT_SUCCESS, entry['result'])
    self.assertTrue(entry['summary'] is not None)
    self.assertTrue(entry['details'] is not None)
    self.assertTrue(entry['error_message'] is None)
    self.assertTrue(entry['exception'] is None)
    self.assertTrue(entry['traceback'] is None)

    # Call into the correct distributor
    call_args = mock_plugins.MOCK_DISTRIBUTOR.publish_repo.call_args[0]
    self.assertEqual('repo-1', call_args[0].id)
    self.assertTrue(call_args[1] is not None)
    self.assertEqual({}, call_args[2].plugin_config)
    self.assertEqual(publish_config, call_args[2].repo_plugin_config)
    self.assertEqual({}, call_args[2].override_config)
    self.assertEqual(0, mock_plugins.MOCK_DISTRIBUTOR_2.publish_repo.call_count)

    self.assertEqual(1, mock_started.call_count)
    self.assertEqual('repo-1', mock_started.call_args[0][0])
    self.assertEqual(1, mock_finished.call_count)
    self.assertEqual('repo-1', mock_finished.call_args[0][0]['repo_id'])
def test_publish(self, mock_finished, mock_started, mock_get_working_directory, dt):
    """
    Tests publish under normal conditions when everything is configured correctly.
    """
    # Setup
    publish_config = {'foo': 'bar'}
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor', publish_config,
                                             False, distributor_id='dist-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor-2', publish_config,
                                             False, distributor_id='dist-2')
    # Freeze the mocked clock so last_publish is predictable.
    dt.utcnow.return_value = 1234

    # Test
    self.publish_manager.publish('repo-1', 'dist-1', None)

    # Verify

    # Database
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': 'repo-1', 'id': 'dist-1'})
    self.assertTrue(repo_distributor['last_publish'] is not None)
    self.assertEqual(repo_distributor['last_publish'], dt.utcnow.return_value)

    # History
    history = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(history))
    entry = history[0]
    self.assertEqual('repo-1', entry['repo_id'])
    self.assertEqual('dist-1', entry['distributor_id'])
    self.assertEqual('mock-distributor', entry['distributor_type_id'])
    self.assertTrue(entry['started'] is not None)
    self.assertTrue(entry['completed'] is not None)
    self.assertEqual(RepoPublishResult.RESULT_SUCCESS, entry['result'])
    self.assertTrue(entry['summary'] is not None)
    self.assertTrue(entry['details'] is not None)
    self.assertTrue(entry['error_message'] is None)
    self.assertTrue(entry['exception'] is None)
    self.assertTrue(entry['traceback'] is None)

    # Call into the correct distributor
    call_args = mock_plugins.MOCK_DISTRIBUTOR.publish_repo.call_args[0]
    self.assertEqual('repo-1', call_args[0].id)
    self.assertTrue(call_args[1] is not None)
    self.assertEqual({}, call_args[2].plugin_config)
    self.assertEqual(publish_config, call_args[2].repo_plugin_config)
    self.assertEqual({}, call_args[2].override_config)
    self.assertEqual(0, mock_plugins.MOCK_DISTRIBUTOR_2.publish_repo.call_count)

    self.assertEqual(1, mock_started.call_count)
    self.assertEqual('repo-1', mock_started.call_args[0][0])
    self.assertEqual(1, mock_finished.call_count)
    self.assertEqual('repo-1', mock_finished.call_args[0][0]['repo_id'])
def _do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config):
    """
    Run a single publish through the given distributor plugin.

    On failure this records the end time on the distributor, writes an error
    entry into the publish history collection, logs the exception, and
    re-raises the original exception unchanged.

    :param repo: repository being published; only repo['id'] is read here
    :param distributor_id: identifies the distributor on the repo
    :param distributor_instance: plugin providing publish_repo() and
        cancel_publish_repo()
    :param transfer_repo: plugin-facing representation of the repo
    :param conduit: conduit the plugin uses to interact with core pulp
    :param call_config: configuration passed through to the plugin
    """
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']

    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(
            distributor_instance.publish_repo, distributor_instance.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        # Record when the (failed) publish finished on the distributor itself.
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' % {'r': repo_id}))
        raise
def publish_history(self, repo_id, distributor_id, limit=None):
    """
    Returns publish history entries for the give repo, sorted from most
    recent to oldest. If there are no entries, an empty list is returned.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the distributor to retrieve history for
    @type  distributor_id: str

    @param limit: maximum number of results to return
    @type  limit: int

    @return: list of publish history result instances
    @rtype:  list of L{pulp.server.db.model.repository.RepoPublishResult}

    @raise MissingResource: if repo_id does not reference a valid repo
    """
    # Validation
    repo = Repo.get_collection().find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    dist = RepoDistributor.get_collection().find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if dist is None:
        raise MissingResource(distributor_id)

    if limit is None:
        limit = 10  # default here for each of REST API calls into here

    # Retrieve the entries, newest (by completion time) first.
    query = {'repo_id': repo_id, 'distributor_id': distributor_id}
    cursor = RepoPublishResult.get_collection().find(query)
    cursor.limit(limit)
    cursor.sort('completed', pymongo.DESCENDING)

    return list(cursor)
def test_publish_with_error(self):
    """
    Tests a publish when the plugin raises an error.
    """
    # Setup
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = Exception()
    self.repo_manager.create_repo('gonna-bail')
    self.distributor_manager.add_distributor('gonna-bail', 'mock-distributor', {}, False,
                                             distributor_id='bad-dist')

    self.assertRaises(Exception, self.publish_manager.publish, 'gonna-bail', 'bad-dist')

    # Verify
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': 'gonna-bail', 'id': 'bad-dist'})
    self.assertTrue(repo_distributor is not None)
    self.assertTrue(assert_last_sync_time(repo_distributor['last_publish']))

    history = list(RepoPublishResult.get_collection().find({'repo_id': 'gonna-bail'}))
    self.assertEqual(1, len(history))
    failure = history[0]
    self.assertEqual('gonna-bail', failure['repo_id'])
    self.assertEqual('bad-dist', failure['distributor_id'])
    self.assertEqual('mock-distributor', failure['distributor_type_id'])
    self.assertTrue(failure['started'] is not None)
    self.assertTrue(failure['completed'] is not None)
    self.assertEqual(RepoPublishResult.RESULT_ERROR, failure['result'])
    self.assertTrue(failure['summary'] is None)
    self.assertTrue(failure['details'] is None)
    self.assertTrue(failure['error_message'] is not None)
    self.assertTrue(failure['exception'] is not None)
    self.assertTrue(failure['traceback'] is not None)

    # Cleanup
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def test_publish_no_plugin_report(self):
    """
    Tests publishing against a sloppy plugin that doesn't return a report.
    """
    # Setup
    self.repo_manager.create_repo('sloppy')
    self.distributor_manager.add_distributor('sloppy', 'mock-distributor', {}, True,
                                             distributor_id='slop')
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = None  # lame plugin

    # Test
    self.publish_manager.publish('sloppy', 'slop')

    # Verify: summary/details default to 'Unknown' when no report is returned
    history = list(RepoPublishResult.get_collection().find({'repo_id': 'sloppy'}))
    self.assertEqual(1, len(history))
    self.assertEqual('Unknown', history[0]['summary'])
    self.assertEqual('Unknown', history[0]['details'])
def test_publish_no_plugin_report(self):
    """
    Tests publishing against a sloppy plugin that doesn't return a report.
    """
    # Setup
    self.repo_manager.create_repo("sloppy")
    self.distributor_manager.add_distributor("sloppy", "mock-distributor", {}, True,
                                             distributor_id="slop")
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = None  # lame plugin

    # Test
    self.publish_manager.publish("sloppy", "slop")

    # Verify: summary/details default to "Unknown" when no report is returned
    history = list(RepoPublishResult.get_collection().find({"repo_id": "sloppy"}))
    self.assertEqual(1, len(history))
    self.assertEqual("Unknown", history[0]["summary"])
    self.assertEqual("Unknown", history[0]["details"])
def test_publish_failure_report(self):
    """
    Tests a publish call that indicates a graceful failure.
    """
    # Setup
    publish_config = {'foo': 'bar'}
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor('repo-1', 'mock-distributor', publish_config,
                                             False, distributor_id='dist-1')
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = PublishReport(
        False, 'Summary of the publish', 'Details of the publish')

    # Test
    report = self.publish_manager.publish('repo-1', 'dist-1', None)

    # Verify: both the stored history entry and the returned report record the failure.
    entries = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(entries))
    for candidate in entries[0], report:
        self.assertEqual('repo-1', candidate['repo_id'])
        self.assertEqual('dist-1', candidate['distributor_id'])
        self.assertEqual('mock-distributor', candidate['distributor_type_id'])
        self.assertTrue(candidate['started'] is not None)
        self.assertTrue(candidate['completed'] is not None)
        self.assertEqual(RepoPublishResult.RESULT_FAILED, candidate['result'])
        self.assertTrue(candidate['summary'] is not None)
        self.assertTrue(candidate['details'] is not None)
        self.assertTrue(candidate['error_message'] is None)
        self.assertTrue(candidate['exception'] is None)
        self.assertTrue(candidate['traceback'] is None)

    # Cleanup
    mock_plugins.reset()
def publish_history(self, repo_id, distributor_id, limit=None):
    """
    Returns publish history entries for the give repo, sorted from most
    recent to oldest. If there are no entries, an empty list is returned.

    @param repo_id: identifies the repo
    @type  repo_id: str

    @param distributor_id: identifies the distributor to retrieve history for
    @type  distributor_id: str

    @param limit: maximum number of results to return
    @type  limit: int

    @return: list of publish history result instances
    @rtype:  list of L{pulp.server.db.model.repository.RepoPublishResult}

    @raise MissingResource: if repo_id does not reference a valid repo
    """
    # Validation
    repo = Repo.get_collection().find_one({'id' : repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    dist = RepoDistributor.get_collection().find_one({'repo_id' : repo_id,
                                                      'id' : distributor_id})
    if dist is None:
        raise MissingResource(distributor_id)

    if limit is None:
        limit = 10  # default here for each of REST API calls into here

    # Retrieve the entries, newest (by completion time) first.
    history_cursor = RepoPublishResult.get_collection().find(
        {'repo_id' : repo_id, 'distributor_id' : distributor_id})
    history_cursor.limit(limit)
    history_cursor.sort('completed', pymongo.DESCENDING)
    return list(history_cursor)
def test_publish_with_error(self, mock_get_working_directory):
    """
    Tests a publish when the plugin raises an error.
    """
    # Setup
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = Exception()
    self.repo_manager.create_repo('gonna-bail')
    self.distributor_manager.add_distributor('gonna-bail', 'mock-distributor', {}, False,
                                             distributor_id='bad-dist')

    self.assertRaises(Exception, self.publish_manager.publish, 'gonna-bail', 'bad-dist')

    # Verify
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': 'gonna-bail', 'id': 'bad-dist'})
    self.assertTrue(repo_distributor is not None)
    # last_publish stays unset when the publish fails
    self.assertEqual(repo_distributor['last_publish'], None)

    history = list(RepoPublishResult.get_collection().find({'repo_id': 'gonna-bail'}))
    self.assertEqual(1, len(history))
    failure = history[0]
    self.assertEqual('gonna-bail', failure['repo_id'])
    self.assertEqual('bad-dist', failure['distributor_id'])
    self.assertEqual('mock-distributor', failure['distributor_type_id'])
    self.assertTrue(failure['started'] is not None)
    self.assertTrue(failure['completed'] is not None)
    self.assertEqual(RepoPublishResult.RESULT_ERROR, failure['result'])
    self.assertTrue(failure['summary'] is None)
    self.assertTrue(failure['details'] is None)
    self.assertTrue(failure['error_message'] is not None)
    self.assertTrue(failure['exception'] is not None)
    self.assertTrue(failure['traceback'] is not None)

    # Cleanup
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def test_publish_no_plugin_report(self):
    """
    Tests publishing against a sloppy plugin that doesn't return a report.
    """
    # Setup
    self.repo_manager.create_repo('sloppy')
    self.distributor_manager.add_distributor('sloppy', 'mock-distributor', {}, True,
                                             distributor_id='slop')
    mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = None  # lame plugin

    # Test
    self.publish_manager.publish('sloppy', 'slop')

    # Verify: summary/details fall back to 'Unknown' without a plugin report
    results = list(RepoPublishResult.get_collection().find({'repo_id': 'sloppy'}))
    self.assertEqual(1, len(results))
    self.assertEqual('Unknown', results[0]['summary'])
    self.assertEqual('Unknown', results[0]['details'])
def _do_publish(repo_obj, dist_id, dist_inst, transfer_repo, conduit, call_config):
    """
    Publish the repository using the given distributor.

    :param repo_obj: repository object
    :type repo_obj: pulp.server.db.model.Repository
    :param dist_id: identifies the distributor
    :type dist_id: str
    :param dist_inst: instance of the distributor
    :type dist_inst: dict
    :param transfer_repo: dict representation of a repo for the plugins to use
    :type transfer_repo: pulp.plugins.model.Repository
    :param conduit: allows the plugin to interact with core pulp
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param call_config: allows the plugin to retrieve values
    :type call_config: pulp.plugins.config.PluginCallConfiguration

    :return: publish result containing information about the publish
    :rtype: pulp.server.db.model.repository.RepoPublishResult

    :raises pulp_exceptions.PulpCodedException: if the publish report's success flag is falsey
    """
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()

    # Capture the start time up front so a history entry covers the whole plugin call.
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(dist_inst.publish_repo,
                                                dist_inst.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
        # A report that explicitly carries success_flag=False is promoted to an
        # exception (PLP0034) so it is handled by the same error path below.
        if publish_report is not None and hasattr(publish_report, 'success_flag') \
                and not publish_report.success_flag:
            raise pulp_exceptions.PulpCodedException(
                error_code=error_codes.PLP0034, repository_id=repo_obj.repo_id,
                distributor_id=dist_id)
    except Exception, e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one(
            {'repo_id': repo_obj.repo_id, 'id': dist_id})
        # NOTE(review): unlike earlier revisions, last_publish is NOT updated here
        # before saving; this save persists exactly what find_one returned.
        # Confirm that is intentional.
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_obj.repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        _logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' %
              {'r': repo_obj.repo_id}))
        # Re-raise the original exception so the task layer records the failure.
        raise
    # NOTE(review): the docstring promises a returned RepoPublishResult, but no
    # success-path code is visible here -- this view of the function may be
    # truncated; confirm against version control before relying on the return.
self.distributor_manager.add_distributor('gonna-bail', 'mock-distributor', {}, False, distributor_id='bad-dist') # Test try: self.publish_manager.publish('gonna-bail', 'bad-dist') self.fail('Expected exception was not raised') except publish_manager.PulpExecutionException, e: print(e) # for coverage # Verify repo_distributor = RepoDistributor.get_collection().find_one({'repo_id' : 'gonna-bail', 'id' : 'bad-dist'}) self.assertTrue(repo_distributor is not None) self.assertTrue(assert_last_sync_time(repo_distributor['last_publish'])) entries = list(RepoPublishResult.get_collection().find({'repo_id' : 'gonna-bail'})) self.assertEqual(1, len(entries)) self.assertEqual('gonna-bail', entries[0]['repo_id']) self.assertEqual('bad-dist', entries[0]['distributor_id']) self.assertEqual('mock-distributor', entries[0]['distributor_type_id']) self.assertTrue(entries[0]['started'] is not None) self.assertTrue(entries[0]['completed'] is not None) self.assertEqual(RepoPublishResult.RESULT_ERROR, entries[0]['result']) self.assertTrue(entries[0]['summary'] is None) self.assertTrue(entries[0]['details'] is None) self.assertTrue(entries[0]['error_message'] is not None) self.assertTrue(entries[0]['exception'] is not None) self.assertTrue(entries[0]['traceback'] is not None) # Cleanup mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def clean(self):
    """Scrub every collection this suite writes to, after the base cleanup."""
    super(RepoSyncManagerTests, self).clean()
    for model_class in (Repo, RepoDistributor, RepoPublishResult):
        model_class.get_collection().remove()
self.distributor_manager.add_distributor("gonna-bail", "mock-distributor", {}, False, distributor_id="bad-dist") # Test try: self.publish_manager.publish("gonna-bail", "bad-dist") self.fail("Expected exception was not raised") except publish_manager.PulpExecutionException, e: print(e) # for coverage # Verify repo_distributor = RepoDistributor.get_collection().find_one({"repo_id": "gonna-bail", "id": "bad-dist"}) self.assertTrue(repo_distributor is not None) self.assertTrue(assert_last_sync_time(repo_distributor["last_publish"])) entries = list(RepoPublishResult.get_collection().find({"repo_id": "gonna-bail"})) self.assertEqual(1, len(entries)) self.assertEqual("gonna-bail", entries[0]["repo_id"]) self.assertEqual("bad-dist", entries[0]["distributor_id"]) self.assertEqual("mock-distributor", entries[0]["distributor_type_id"]) self.assertTrue(entries[0]["started"] is not None) self.assertTrue(entries[0]["completed"] is not None) self.assertEqual(RepoPublishResult.RESULT_ERROR, entries[0]["result"]) self.assertTrue(entries[0]["summary"] is None) self.assertTrue(entries[0]["details"] is None) self.assertTrue(entries[0]["error_message"] is not None) self.assertTrue(entries[0]["exception"] is not None) self.assertTrue(entries[0]["traceback"] is not None) # Cleanup mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def test_publish(self, mock_finished, mock_started):
    """
    Happy-path publish: the requested distributor (and only it) is invoked with
    the right config stack, history is recorded, and both events fire once.
    """
    # Setup: one repo with two distributors, to prove only one gets called.
    publish_config = {'foo': 'bar'}
    self.repo_manager.create_repo('repo-1')
    self.distributor_manager.add_distributor(
        'repo-1', 'mock-distributor', publish_config, False, distributor_id='dist-1')
    self.distributor_manager.add_distributor(
        'repo-1', 'mock-distributor-2', publish_config, False, distributor_id='dist-2')

    # Test
    self.publish_manager.publish('repo-1', 'dist-1', None)

    # Verify: database records a plausible last_publish time.
    repo_distributor = RepoDistributor.get_collection().find_one(
        {'repo_id': 'repo-1', 'id': 'dist-1'})
    self.assertTrue(repo_distributor['last_publish'] is not None)
    self.assertTrue(assert_last_sync_time(repo_distributor['last_publish']))

    # Verify: a single successful history entry with no error fields populated.
    entries = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
    self.assertEqual(1, len(entries))
    entry = entries[0]
    self.assertEqual('repo-1', entry['repo_id'])
    self.assertEqual('dist-1', entry['distributor_id'])
    self.assertEqual('mock-distributor', entry['distributor_type_id'])
    self.assertEqual(RepoPublishResult.RESULT_SUCCESS, entry['result'])
    for populated_field in ('started', 'completed', 'summary', 'details'):
        self.assertTrue(entry[populated_field] is not None)
    for empty_field in ('error_message', 'exception', 'traceback'):
        self.assertTrue(entry[empty_field] is None)

    # Verify: the call went into the correct distributor with the right configs.
    call_args = mock_plugins.MOCK_DISTRIBUTOR.publish_repo.call_args[0]
    self.assertEqual('repo-1', call_args[0].id)
    self.assertTrue(call_args[1] is not None)
    self.assertEqual({}, call_args[2].plugin_config)
    self.assertEqual(publish_config, call_args[2].repo_plugin_config)
    self.assertEqual({}, call_args[2].override_config)
    self.assertEqual(0, mock_plugins.MOCK_DISTRIBUTOR_2.publish_repo.call_count)

    # Verify: start/finish events fired exactly once, for this repo.
    self.assertEqual(1, mock_started.call_count)
    self.assertEqual('repo-1', mock_started.call_args[0][0])
    self.assertEqual(1, mock_finished.call_count)
    self.assertEqual('repo-1', mock_finished.call_args[0][0]['repo_id'])
(repo_distributor['id'], repo_id)) error_tuples.append(e) # Database Updates repo = model.Repository.objects.get_repo_or_missing_resource(repo_id) repo.delete() try: # Remove all importers and distributors from the repo. This is likely already done by the # calls to other methods in this manager, but in case those failed we still want to attempt # to keep the database clean. RepoDistributor.get_collection().remove({'repo_id': repo_id}, safe=True) RepoImporter.get_collection().remove({'repo_id': repo_id}, safe=True) RepoSyncResult.get_collection().remove({'repo_id': repo_id}, safe=True) RepoPublishResult.get_collection().remove({'repo_id': repo_id}, safe=True) RepoContentUnit.get_collection().remove({'repo_id': repo_id}, safe=True) except Exception, e: msg = _( 'Error updating one or more database collections while removing repo [%(r)s]' ) msg = msg % {'r': repo_id} _logger.exception(msg) error_tuples.append(e) # remove the repo from any groups it was a member of group_manager = manager_factory.repo_group_manager() group_manager.remove_repo_from_groups(repo_id) if len(error_tuples) > 0:
repo_working_dir, repo_id)) error_tuples.append(e) # Database Updates try: Repo.get_collection().remove({'id' : repo_id}, safe=True) # Remove all importers and distributors from the repo # This is likely already done by the calls to other methods in # this manager, but in case those failed we still want to attempt # to keep the database clean RepoDistributor.get_collection().remove({'repo_id' : repo_id}, safe=True) RepoImporter.get_collection().remove({'repo_id' : repo_id}, safe=True) RepoSyncResult.get_collection().remove({'repo_id' : repo_id}, safe=True) RepoPublishResult.get_collection().remove({'repo_id' : repo_id}, safe=True) # Remove all associations from the repo RepoContentUnit.get_collection().remove({'repo_id' : repo_id}, safe=True) except Exception, e: msg = _('Error updating one or more database collections while removing repo [%(r)s]') msg = msg % {'r': repo_id} logger.exception(msg) error_tuples.append(e) # remove the repo from any groups it was a member of group_manager = manager_factory.repo_group_manager() group_manager.remove_repo_from_groups(repo_id) if len(error_tuples) > 0: pe = PulpExecutionException()
except publish_manager.PulpExecutionException, e: print(e) # for coverage # Verify repo_distributor = RepoDistributor.get_collection().find_one({ 'repo_id': 'gonna-bail', 'id': 'bad-dist' }) self.assertTrue(repo_distributor is not None) self.assertTrue(assert_last_sync_time( repo_distributor['last_publish'])) entries = list(RepoPublishResult.get_collection().find( {'repo_id': 'gonna-bail'})) self.assertEqual(1, len(entries)) self.assertEqual('gonna-bail', entries[0]['repo_id']) self.assertEqual('bad-dist', entries[0]['distributor_id']) self.assertEqual('mock-distributor', entries[0]['distributor_type_id']) self.assertTrue(entries[0]['started'] is not None) self.assertTrue(entries[0]['completed'] is not None) self.assertEqual(RepoPublishResult.RESULT_ERROR, entries[0]['result']) self.assertTrue(entries[0]['summary'] is None) self.assertTrue(entries[0]['details'] is None) self.assertTrue(entries[0]['error_message'] is not None) self.assertTrue(entries[0]['exception'] is not None) self.assertTrue(entries[0]['traceback'] is not None) # Cleanup mock_plugins.MOCK_DISTRIBUTOR.publish_repo.side_effect = None
def tearDown(self):
    """Reset the mocked plugin framework and drop all test collections."""
    super(TestDoPublish, self).tearDown()
    mock_plugins.reset()
    for model_class in (Repo, RepoDistributor, RepoPublishResult):
        model_class.get_collection().remove()
class RepoSyncManagerTests(base.PulpServerTests):
    """Tests for RepoPublishManager: task queueing, publishing, and failure reporting."""

    def setUp(self):
        super(RepoSyncManagerTests, self).setUp()
        mock_plugins.install()

        # Create the manager instances for testing
        self.repo_manager = repo_manager.RepoManager()
        self.distributor_manager = distributor_manager.RepoDistributorManager()
        self.publish_manager = publish_manager.RepoPublishManager()

    def tearDown(self):
        super(RepoSyncManagerTests, self).tearDown()
        mock_plugins.reset()

    def clean(self):
        # Drop every collection this suite writes to so tests stay independent.
        super(RepoSyncManagerTests, self).clean()
        Repo.get_collection().remove()
        RepoDistributor.get_collection().remove()
        RepoPublishResult.get_collection().remove()

    @mock.patch('pulp.server.managers.repo.publish.publish.apply_async_with_reservation')
    def test_queue_publish(self, mock_publish_task):
        # queue_publish should dispatch the publish task under a repository
        # reservation with the expected kwargs and repo/action tags.
        repo_id = 'foo'
        distributor_id = 'bar'
        overrides = {'baz': 1}
        self.publish_manager.queue_publish(repo_id, distributor_id, overrides)
        kwargs = {
            'repo_id': repo_id,
            'distributor_id': distributor_id,
            'publish_config_override': overrides
        }
        tags = [resource_tag(RESOURCE_REPOSITORY_TYPE, repo_id), action_tag('publish')]
        mock_publish_task.assert_called_with(RESOURCE_REPOSITORY_TYPE, repo_id,
                                             tags=tags, kwargs=kwargs)

    # Decorators apply bottom-up: mock_finished is the fire_repo_publish_finished
    # patch, mock_started is fire_repo_publish_started.
    @mock.patch('pulp.server.managers.repo._common.get_working_directory',
                return_value="/var/cache/pulp/mock_worker/mock_task_id")
    @mock.patch('pulp.server.managers.event.fire.EventFireManager.fire_repo_publish_started')
    @mock.patch('pulp.server.managers.event.fire.EventFireManager.fire_repo_publish_finished')
    def test_publish(self, mock_finished, mock_started, mock_get_working_directory):
        """
        Tests publish under normal conditions when everything is configured correctly.
        """
        # Setup: two distributors so we can prove only the requested one is used.
        publish_config = {'foo': 'bar'}
        self.repo_manager.create_repo('repo-1')
        self.distributor_manager.add_distributor('repo-1', 'mock-distributor', publish_config,
                                                 False, distributor_id='dist-1')
        self.distributor_manager.add_distributor('repo-1', 'mock-distributor-2', publish_config,
                                                 False, distributor_id='dist-2')

        # Test
        self.publish_manager.publish('repo-1', 'dist-1', None)

        # Verify

        # Database: the distributor was stamped with a valid last_publish time
        repo_distributor = RepoDistributor.get_collection().find_one(
            {'repo_id': 'repo-1', 'id': 'dist-1'})
        self.assertTrue(repo_distributor['last_publish'] is not None)
        self.assertTrue(assert_last_sync_time(repo_distributor['last_publish']))

        # History: exactly one successful entry with no error fields populated
        entries = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
        self.assertEqual(1, len(entries))
        self.assertEqual('repo-1', entries[0]['repo_id'])
        self.assertEqual('dist-1', entries[0]['distributor_id'])
        self.assertEqual('mock-distributor', entries[0]['distributor_type_id'])
        self.assertTrue(entries[0]['started'] is not None)
        self.assertTrue(entries[0]['completed'] is not None)
        self.assertEqual(RepoPublishResult.RESULT_SUCCESS, entries[0]['result'])
        self.assertTrue(entries[0]['summary'] is not None)
        self.assertTrue(entries[0]['details'] is not None)
        self.assertTrue(entries[0]['error_message'] is None)
        self.assertTrue(entries[0]['exception'] is None)
        self.assertTrue(entries[0]['traceback'] is None)

        # Call into the correct distributor (and only that one)
        call_args = mock_plugins.MOCK_DISTRIBUTOR.publish_repo.call_args[0]
        self.assertEqual('repo-1', call_args[0].id)
        self.assertTrue(call_args[1] is not None)
        self.assertEqual({}, call_args[2].plugin_config)
        self.assertEqual(publish_config, call_args[2].repo_plugin_config)
        self.assertEqual({}, call_args[2].override_config)
        self.assertEqual(0, mock_plugins.MOCK_DISTRIBUTOR_2.publish_repo.call_count)

        # Events: start/finish each fired exactly once, for this repo
        self.assertEqual(1, mock_started.call_count)
        self.assertEqual('repo-1', mock_started.call_args[0][0])
        self.assertEqual(1, mock_finished.call_count)
        self.assertEqual('repo-1', mock_finished.call_args[0][0]['repo_id'])

    @mock.patch('pulp.server.managers.repo._common.get_working_directory',
                return_value="/var/cache/pulp/mock_worker/mock_task_id")
    def test_publish_failure_report(self, mock_get_working_directory):
        """
        Tests a publish call that indicates a graceful failure.
        """
        # Setup: the plugin reports failure via PublishReport(success_flag=False).
        publish_config = {'foo': 'bar'}
        self.repo_manager.create_repo('repo-1')
        self.distributor_manager.add_distributor('repo-1', 'mock-distributor', publish_config,
                                                 False, distributor_id='dist-1')
        mock_plugins.MOCK_DISTRIBUTOR.publish_repo.return_value = PublishReport(
            False, 'Summary of the publish', 'Details of the publish')

        # Test: a failed report surfaces as PulpCodedException with code PLP0034.
        try:
            self.publish_manager.publish('repo-1', 'dist-1', None)
            self.fail("This should have raised a PulpCodedException")
        except PulpCodedException, data_exception:
            self.assertEquals(data_exception.error_code, error_codes.PLP0034)

        # Verify: the history entry records an error with traceback details.
        entries = list(RepoPublishResult.get_collection().find({'repo_id': 'repo-1'}))
        self.assertEqual(1, len(entries))
        check_me = entries[0]
        self.assertEqual('repo-1', check_me['repo_id'])
        self.assertEqual('dist-1', check_me['distributor_id'])
        self.assertEqual('mock-distributor', check_me['distributor_type_id'])
        self.assertTrue(check_me['started'] is not None)
        self.assertTrue(check_me['completed'] is not None)
        self.assertEqual(RepoPublishResult.RESULT_ERROR, check_me['result'])
        self.assertTrue(check_me['error_message'] is not None)
        self.assertTrue(check_me['exception'] is not None)
        self.assertTrue(check_me['traceback'] is not None)

        # Cleanup
        mock_plugins.reset()
_logger.exception('Error received removing distributor [%s] from repo [%s]' % ( repo_distributor['id'], repo_id)) error_tuples.append(e) # Database Updates repo = model.Repository.objects.get_repo_or_missing_resource(repo_id) repo.delete() try: # Remove all importers and distributors from the repo. This is likely already done by the # calls to other methods in this manager, but in case those failed we still want to attempt # to keep the database clean. RepoDistributor.get_collection().remove({'repo_id': repo_id}, safe=True) RepoImporter.get_collection().remove({'repo_id': repo_id}, safe=True) RepoSyncResult.get_collection().remove({'repo_id': repo_id}, safe=True) RepoPublishResult.get_collection().remove({'repo_id': repo_id}, safe=True) RepoContentUnit.get_collection().remove({'repo_id': repo_id}, safe=True) except Exception, e: msg = _('Error updating one or more database collections while removing repo [%(r)s]') msg = msg % {'r': repo_id} _logger.exception(msg) error_tuples.append(e) # remove the repo from any groups it was a member of group_manager = manager_factory.repo_group_manager() group_manager.remove_repo_from_groups(repo_id) if len(error_tuples) > 0: pe = pulp_exceptions.PulpExecutionException() pe.child_exceptions = error_tuples raise pe
class RepoPublishManager(object):
    """Coordinates publish calls between repo distributors and their plugins."""

    def publish(self, repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured on.

        The publish operation is executed synchronously in the caller's thread and
        will block until it is completed. The caller must take the necessary steps
        to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None
        """
        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                      'id': distributor_id})
        if repo_distributor is None:
            raise MissingResource(repository=repo_id, distributor=distributor_id)

        distributor_instance, distributor_config = self._get_distributor_instance_and_config(
            repo_id, distributor_id)

        # NOTE(review): this three-expression raise supplies a traceback from
        # sys.exc_info() even though no exception is active here, so the traceback
        # part is None in practice -- confirm a plain raise was not intended.
        if distributor_instance is None:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Register the plugin's cancel hook so an external cancel request can
        # interrupt this publish.
        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(distributor_instance.cancel_publish_repo)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)
        call_config = PluginCallConfiguration(distributor_config, repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = self._do_publish(repo, distributor_id, distributor_instance, transfer_repo,
                                  conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)

        dispatch_context.clear_cancel_control_hook()

    def _get_distributor_instance_and_config(self, repo_id, distributor_id):
        # Returns (distributor_plugin, plugin_config); (None, None) when either
        # the distributor record or its plugin cannot be found.
        repo_distributor_manager = manager_factory.repo_distributor_manager()
        try:
            repo_distributor = repo_distributor_manager.get_distributor(repo_id, distributor_id)
            distributor, config = plugin_api.get_distributor_by_id(
                repo_distributor['distributor_type_id'])
        except (MissingResource, plugin_exceptions.PluginNotFound):
            distributor = None
            config = None
        return distributor, config

    def _do_publish(self, repo, distributor_id, distributor_instance, transfer_repo, conduit,
                    call_config):
        # Runs the plugin's publish and records the outcome (success or error) in
        # the publish history collection; returns the saved result document.
        distributor_coll = RepoDistributor.get_collection()
        publish_result_coll = RepoPublishResult.get_collection()
        repo_id = repo['id']

        # Perform the publish
        publish_start_timestamp = _now_timestamp()
        try:
            publish_report = distributor_instance.publish_repo(transfer_repo, conduit,
                                                               call_config)
        except Exception, e:
            publish_end_timestamp = _now_timestamp()

            # Reload the distributor in case the scratchpad is set by the plugin
            repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                          'id': distributor_id})
            repo_distributor['last_publish'] = publish_end_timestamp
            distributor_coll.save(repo_distributor, safe=True)

            # Add a publish history entry for the run
            result = RepoPublishResult.error_result(
                repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
                publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
            publish_result_coll.save(result, safe=True)

            _LOG.exception(
                _('Exception caught from plugin during publish for repo [%(r)s]' %
                  {'r': repo_id}))
            # Wrap the plugin error while preserving the original traceback
            # (Python 2 three-expression raise).
            raise PulpExecutionException(), None, sys.exc_info()[2]

        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                      'id': distributor_id})
        # NOTE(review): a fresh _now_timestamp() is stored instead of the
        # publish_end_timestamp captured just above, so last_publish and the
        # history entry's completion time differ slightly -- confirm intentional.
        repo_distributor['last_publish'] = _now_timestamp()
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish entry
        if publish_report is not None and isinstance(publish_report, PublishReport):
            summary = publish_report.summary
            details = publish_report.details
            if publish_report.success_flag:
                result_code = RepoPublishResult.RESULT_SUCCESS
            else:
                result_code = RepoPublishResult.RESULT_FAILED
        else:
            # Plugins that return no report are recorded as successful with
            # placeholder summary/details.
            _LOG.warn('Plugin type [%s] on repo [%s] did not return a valid publish report' %
                      (repo_distributor['distributor_type_id'], repo_id))
            summary = details = _('Unknown')
            result_code = RepoPublishResult.RESULT_SUCCESS

        result = RepoPublishResult.expected_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, summary, details, result_code)
        publish_result_coll.save(result, safe=True)

        return result
def __init__(self, repo, publish_conduit, config, distributor_type):
    """
    :param repo: Pulp managed Yum repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: Conduit providing access to relative Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: Pulp configuration for the distributor
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param distributor_type: The type of the distributor that is being published
    :type distributor_type: str

    :ivar last_published: last time this distributor published the repo
    :ivar last_deleted: last time a unit was removed from this repository
    :ivar repo: repository being operated on
    :ivar predistributor: distributor object that is associated with this
        distributor. Its publish history affects the type of publish performed
    :ivar symlink_list: list of symlinks to rsync
    :ivar content_unit_file_list: list of content units to rsync
    :ivar symlink_src: path to directory containing all symlinks
    """
    super(Publisher, self).__init__("Repository publish", repo, publish_conduit, config,
                                    distributor_type=distributor_type)

    self.distributor = Distributor.objects.get_or_404(
        repo_id=self.repo.id, distributor_id=publish_conduit.distributor_id)
    self.last_published = self.distributor["last_publish"]
    self.last_deleted = repo.last_unit_removed
    self.repo = repo
    self.predistributor = self._get_predistributor()

    # The scratchpad remembers how far the predistributor had published the last
    # time we ran; it is the starting point for a fast-forward publish.
    self.last_predist_last_published = None
    if self.predistributor:
        scratchpad = self.distributor.scratchpad or {}
        self.last_predist_last_published = scratchpad.get("last_predist_last_published")

    if self.last_published:
        string_date = dateutils.format_iso8601_datetime(self.last_published)
    else:
        string_date = None
    # Collect the predistributor's publish history since our own last publish.
    # NOTE(review): if last_published is unset, string_date is None and the
    # {'started': {'$gte': None}} query behaves differently -- confirm intended.
    if self.predistributor:
        search_params = {'repo_id': repo.id,
                         'distributor_id': self.predistributor["distributor_id"],
                         'started': {"$gte": string_date}}
        self.predist_history = RepoPublishResult.get_collection().find(search_params)
    else:
        self.predist_history = []

    self.remote_path = self.get_remote_repo_path()

    if self.is_fastforward():
        # Fast-forward publish: only cover the window since the last recorded
        # predistributor publish.
        start_date = self.last_predist_last_published
        end_date = None
        if self.predistributor:
            end_date = self.predistributor["last_publish"]
        date_filter = self.create_date_range_filter(start_date=start_date, end_date=end_date)
    else:
        # Full publish: no lower bound; cap at the predistributor's last publish
        # (when present) so units it has not yet published are excluded.
        date_filter = None
        if self.predistributor:
            end_date = self.predistributor["last_publish"]
            date_filter = self.create_date_range_filter(None, end_date=end_date)

    self.symlink_list = []
    self.content_unit_file_list = []
    self.symlink_src = os.path.join(self.get_working_dir(), '.relative/')

    # (sic) spelling preserved -- the method is defined elsewhere with this name.
    self._add_necesary_steps(date_filter=date_filter, config=config)
def publish_history(self, repo_id, distributor_id, limit=None, sort=constants.SORT_DESCENDING,
                    start_date=None, end_date=None):
    """
    Return publish history entries for the given repo and distributor, sorted by
    start time (most recent first by default). An empty list is returned when
    there is no history.

    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_id: identifies the distributor to retrieve history for
    :type distributor_id: str
    :param limit: if specified, at most this many entries are returned; the
        default is the entire publish history
    :type limit: int
    :param sort: sort direction of the results ("ascending" or "descending",
        applied to the start date); descending is the default
    :type sort: str
    :param start_date: if specified, no events prior to this iso8601 datetime
        string are returned
    :type start_date: str
    :param end_date: if specified, no events after this iso8601 datetime string
        are returned
    :type end_date: str

    :return: list of publish history result instances
    :rtype: list

    :raise MissingResource: if repo_id does not reference a valid repo
    :raise InvalidValue: if one or more of the options have invalid values
    """
    # Both the repo and the distributor must exist before querying history.
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repo_id)
    if RepoDistributor.get_collection().find_one(
            {'repo_id': repo_id, 'id': distributor_id}) is None:
        raise MissingResource(distributor_id)

    bad_params = []

    # limit must coerce to an int of at least 1.
    if limit is not None:
        try:
            limit = int(limit)
        except ValueError:
            bad_params.append('limit')
        else:
            if limit < 1:
                bad_params.append('limit')

    # sort must be one of the supported direction keywords.
    if sort not in constants.SORT_DIRECTION:
        bad_params.append('sort')

    # Both date bounds, when given, must parse as iso8601 datetimes.
    for param_name, param_value in (('start_date', start_date), ('end_date', end_date)):
        if param_value is None:
            continue
        try:
            dateutils.parse_iso8601_datetime(param_value)
        except (ValueError, isodate.ISO8601Error):
            bad_params.append(param_name)

    if bad_params:
        raise InvalidValue(bad_params)

    # Build the mongo query, constraining the start date when bounds were given.
    query = {'repo_id': repo_id, 'distributor_id': distributor_id}
    started_range = {}
    if start_date:
        started_range['$gte'] = start_date
    if end_date:
        started_range['$lte'] = end_date
    if started_range:
        query['started'] = started_range

    # Fetch, order by start time in the requested direction, and cap the count.
    cursor = RepoPublishResult.get_collection().find(query)
    cursor.sort('started', direction=constants.SORT_DIRECTION[sort])
    if limit is not None:
        cursor.limit(limit)

    return list(cursor)
class RepoPublishManager(object):
    """Static-method facade that coordinates distributor publish operations."""

    @staticmethod
    def publish(repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured on.

        The publish operation is executed synchronously in the caller's thread and
        will block until it is completed. The caller must take the necessary steps
        to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None

        :return: report of the details of the publish
        :rtype: pulp.server.db.model.repository.RepoPublishResult
        """
        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                      'id': distributor_id})
        if repo_distributor is None:
            raise MissingResource(repository=repo_id, distributor=distributor_id)

        distributor_instance, distributor_config = RepoPublishManager.\
            _get_distributor_instance_and_config(repo_id, distributor_id)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)
        call_config = PluginCallConfiguration(distributor_config, repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = RepoPublishManager._do_publish(repo, distributor_id, distributor_instance,
                                                transfer_repo, conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)

        return result

    @staticmethod
    def _get_distributor_instance_and_config(repo_id, distributor_id):
        # Resolve the distributor record and its plugin; unlike older revisions
        # this version lets MissingResource / PluginNotFound propagate.
        repo_distributor_manager = manager_factory.repo_distributor_manager()
        repo_distributor = repo_distributor_manager.get_distributor(repo_id, distributor_id)
        distributor, config = plugin_api.get_distributor_by_id(
            repo_distributor['distributor_type_id'])
        return distributor, config

    @staticmethod
    def _do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit,
                    call_config):
        # Runs the plugin publish (with SIGTERM-aware cancellation) and records the
        # outcome in publish history; returns the saved result document.
        distributor_coll = RepoDistributor.get_collection()
        publish_result_coll = RepoPublishResult.get_collection()
        repo_id = repo['id']

        # Perform the publish
        publish_start_timestamp = _now_timestamp()
        try:
            # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
            # respond to signals by calling the Distributor's cancel_publish_repo() method.
            publish_repo = register_sigterm_handler(distributor_instance.publish_repo,
                                                    distributor_instance.cancel_publish_repo)
            publish_report = publish_repo(transfer_repo, conduit, call_config)
        except Exception, e:
            publish_end_timestamp = _now_timestamp()

            # Reload the distributor in case the scratchpad is set by the plugin
            repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                          'id': distributor_id})
            repo_distributor['last_publish'] = publish_end_timestamp
            distributor_coll.save(repo_distributor, safe=True)

            # Add a publish history entry for the run
            result = RepoPublishResult.error_result(
                repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
                publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
            publish_result_coll.save(result, safe=True)

            logger.exception(
                _('Exception caught from plugin during publish for repo [%(r)s]' %
                  {'r': repo_id}))
            # Re-raise the original exception so the task layer records the failure.
            raise

        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                      'id': distributor_id})
        # NOTE(review): a fresh _now_timestamp() is stored instead of reusing
        # publish_end_timestamp, so last_publish and the history 'completed' time
        # differ slightly -- confirm intentional.
        repo_distributor['last_publish'] = _now_timestamp()
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish entry
        if publish_report is not None and isinstance(publish_report, PublishReport):
            summary = publish_report.summary
            details = publish_report.details
            if publish_report.success_flag:
                logger.debug('publish succeeded for repo [%s] with distributor ID [%s]' %
                             (repo_id, distributor_id))
                result_code = RepoPublishResult.RESULT_SUCCESS
            else:
                logger.info('publish failed for repo [%s] with distributor ID [%s]' %
                            (repo_id, distributor_id))
                logger.debug('summary for repo [%s] with distributor ID [%s]: %s' %
                             (repo_id, distributor_id, summary))
                result_code = RepoPublishResult.RESULT_FAILED
        else:
            # Plugins that return no report are recorded as successful with
            # placeholder summary/details.
            msg = _('Plugin type [%(type)s] on repo [%(repo)s] did not return a valid publish '
                    'report')
            msg = msg % {'type': repo_distributor['distributor_type_id'], 'repo': repo_id}
            logger.warn(msg)
            summary = details = _('Unknown')
            result_code = RepoPublishResult.RESULT_SUCCESS

        result = RepoPublishResult.expected_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, summary, details, result_code)
        publish_result_coll.save(result, safe=True)

        return result
# Database Updates try: Repo.get_collection().remove({'id': repo_id}, safe=True) # Remove all importers and distributors from the repo # This is likely already done by the calls to other methods in # this manager, but in case those failed we still want to attempt # to keep the database clean RepoDistributor.get_collection().remove({'repo_id': repo_id}, safe=True) RepoImporter.get_collection().remove({'repo_id': repo_id}, safe=True) RepoSyncResult.get_collection().remove({'repo_id': repo_id}, safe=True) RepoPublishResult.get_collection().remove({'repo_id': repo_id}, safe=True) # Remove all associations from the repo RepoContentUnit.get_collection().remove({'repo_id': repo_id}, safe=True) except Exception, e: _LOG.exception( 'Error updating one or more database collections while removing repo [%s]' % repo_id) error_tuples.append((_('Database Removal Error'), e.args)) # remove the repo from any groups it was a member of group_manager = manager_factory.repo_group_manager() group_manager.remove_repo_from_groups(repo_id) if len(error_tuples) > 0:
def publish_history(self, repo_id, distributor_id, limit=None, sort=constants.SORT_DESCENDING,
                    start_date=None, end_date=None):
    """
    Returns publish history entries for the give repo, sorted from most recent
    to oldest. If there are no entries, an empty list is returned.

    :param repo_id: identifies the repo
    :type repo_id: str
    :param distributor_id: identifies the distributor to retrieve history for
    :type distributor_id: str
    :param limit: if specified, the query will only return up to this amount of
           entries; default is to return the entire publish history
    :type limit: int
    :param sort: Indicates the sort direction of the results, which are sorted by start date.
        Options are "ascending" and "descending". Descending is the default.
    :type sort: str
    :param start_date: if specified, no events prior to this date will be returned. Expected to
        be an iso8601 datetime string.
    :type start_date: str
    :param end_date: if specified, no events after this date will be returned. Expected to be
        an iso8601 datetime string.
    :type end_date: str

    :return: list of publish history result instances
    :rtype: list

    :raise MissingResource: if repo_id does not reference a valid repo
    :raise InvalidValue: if one or more of the options have invalid values
    """

    # Validation: both the repo and the distributor must exist
    repo = Repo.get_collection().find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    dist = RepoDistributor.get_collection().find_one({'repo_id': repo_id,
                                                      'id': distributor_id})
    if dist is None:
        raise MissingResource(distributor_id)

    invalid_values = []

    # Verify the limit makes sense. Catch TypeError as well as ValueError:
    # a non-numeric, non-string value (e.g. a list produced by a repeated
    # query parameter) makes int() raise TypeError, which previously escaped
    # instead of being reported as an invalid value.
    if limit is not None:
        try:
            limit = int(limit)
            if limit < 1:
                invalid_values.append('limit')
        except (TypeError, ValueError):
            invalid_values.append('limit')

    # Verify the sort direction is valid
    if sort not in constants.SORT_DIRECTION:
        invalid_values.append('sort')

    # Verify that start_date and end_date is valid
    if start_date is not None:
        try:
            dateutils.parse_iso8601_datetime(start_date)
        except (ValueError, isodate.ISO8601Error):
            invalid_values.append('start_date')
    if end_date is not None:
        try:
            dateutils.parse_iso8601_datetime(end_date)
        except (ValueError, isodate.ISO8601Error):
            invalid_values.append('end_date')

    # Report any invalid values
    if invalid_values:
        raise InvalidValue(invalid_values)

    # Assemble the mongo search parameters
    search_params = {'repo_id': repo_id, 'distributor_id': distributor_id}
    # Add in date range limits if specified
    date_range = {}
    if start_date:
        date_range['$gte'] = start_date
    if end_date:
        date_range['$lte'] = end_date
    if date_range:
        search_params['started'] = date_range

    # Retrieve the entries
    cursor = RepoPublishResult.get_collection().find(search_params)
    # Sort the results on the 'started' field. By default, descending order is used
    cursor.sort('started', direction=constants.SORT_DIRECTION[sort])
    if limit is not None:
        cursor.limit(limit)

    return list(cursor)
def _validate_repo_publish_result():
    """
    Validate the RepoPublishResult collection against a blank reference model.

    :return: result of the model validation for RepoPublishResult
    """
    model_cls = RepoPublishResult
    # A reference instance built from empty fields; only its structure matters
    empty_reference = model_cls('', '', '', '', '', '')
    collection = model_cls.get_collection()
    return _validate_model(model_cls.__name__, collection, empty_reference)