def GET(self, repo_id, distributor_id):
    """
    Return the publish history for the given repository and distributor,
    honoring the optional limit/sort/start-date/end-date query filters.
    """
    # Params -- each recognized filter value arrives as a list of raw strings.
    filters = self.filters([constants.REPO_HISTORY_FILTER_LIMIT,
                            constants.REPO_HISTORY_FILTER_SORT,
                            constants.REPO_HISTORY_FILTER_START_DATE,
                            constants.REPO_HISTORY_FILTER_END_DATE])
    limit = filters.get(constants.REPO_HISTORY_FILTER_LIMIT, None)
    sort = filters.get(constants.REPO_HISTORY_FILTER_SORT, None)
    start_date = filters.get(constants.REPO_HISTORY_FILTER_START_DATE, None)
    end_date = filters.get(constants.REPO_HISTORY_FILTER_END_DATE, None)

    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            # A non-integer limit is a client error.
            _LOG.error('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue([constants.REPO_HISTORY_FILTER_LIMIT])

    # Default the sort direction when the caller omitted it.
    if sort is None:
        sort = constants.SORT_DESCENDING
    else:
        sort = sort[0]

    if start_date:
        start_date = start_date[0]
    if end_date:
        end_date = end_date[0]

    publish_manager = manager_factory.repo_publish_manager()
    entries = publish_manager.publish_history(repo_id, distributor_id, limit=limit, sort=sort,
                                              start_date=start_date, end_date=end_date)
    return self.ok(entries)
def get(self, request, repo_id, distributor_id):
    """
    Retrieve publish history for a specified distributor.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param repo_id: id of the repository
    :type repo_id: str
    :param distributor_id: retrieve the publish history of this distributor
    :type distributor_id: str
    :return: Response containing a list of dicts, one for each publish event
    :rtype : django.http.HttpResponse
    """
    sort = request.GET.get(constants.REPO_HISTORY_FILTER_SORT)
    start_date = request.GET.get(constants.REPO_HISTORY_FILTER_START_DATE)
    end_date = request.GET.get(constants.REPO_HISTORY_FILTER_END_DATE)
    limit = request.GET.get(constants.REPO_HISTORY_FILTER_LIMIT)
    if limit:
        try:
            limit = int(limit)
        except ValueError:
            # A non-integer limit is a client error, not a server failure.
            raise pulp_exceptions.InvalidValue([constants.REPO_HISTORY_FILTER_LIMIT])
    if not sort:
        # Default to newest-first when no sort direction was requested.
        sort = constants.SORT_DESCENDING
    publish_manager = manager_factory.repo_publish_manager()
    entries = publish_manager.publish_history(repo_id, distributor_id, limit=limit, sort=sort,
                                              start_date=start_date, end_date=end_date)
    return generate_json_response_with_pulp_encoder(entries)
def sync_with_auto_publish(repo_id, overrides=None):
    """
    Sync a repository and, upon successful completion, publish every
    distributor that is configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: A task result containing the details of the task executed and any spawned tasks
    :rtype: TaskResult
    """
    sync_result = managers.repo_sync_manager().sync(repo_id, sync_config_override=overrides)
    result = TaskResult(sync_result)
    auto_distributors = managers.repo_publish_manager().auto_distributors(repo_id)
    # Queue one publish per distributor that has auto publish enabled.
    result.spawned_tasks = [publish(repo_id, distributor['id'])
                            for distributor in auto_distributors]
    return result
def test_syntactic_sugar_methods(self):
    """
    Tests the syntactic sugar methods for retrieving specific managers.
    """
    # Setup
    factory.initialize()

    # Test: each convenience accessor should return an instance of its
    # corresponding manager class.
    cases = (
        (factory.authentication_manager, AuthenticationManager),
        (factory.cert_generation_manager, CertGenerationManager),
        (factory.certificate_manager, CertificateManager),
        (factory.password_manager, PasswordManager),
        (factory.permission_manager, PermissionManager),
        (factory.permission_query_manager, PermissionQueryManager),
        (factory.role_manager, RoleManager),
        (factory.role_query_manager, RoleQueryManager),
        (factory.user_manager, UserManager),
        (factory.user_query_manager, UserQueryManager),
        (factory.repo_manager, RepoManager),
        (factory.repo_unit_association_manager, RepoUnitAssociationManager),
        (factory.repo_publish_manager, RepoPublishManager),
        (factory.repo_query_manager, RepoQueryManager),
        (factory.repo_sync_manager, RepoSyncManager),
        (factory.content_manager, ContentManager),
        (factory.content_query_manager, ContentQueryManager),
        (factory.content_upload_manager, ContentUploadManager),
        (factory.consumer_manager, ConsumerManager),
        (factory.topic_publish_manager, TopicPublishManager),
    )
    for sugar_method, manager_class in cases:
        self.assertTrue(isinstance(sugar_method(), manager_class))
def publish_itinerary(repo_id, distributor_id, overrides=None):
    """
    Create an itinerary for repo publish.

    @param repo_id: id of the repo to publish
    @type repo_id: str
    @param distributor_id: id of the distributor to use for the repo publish
    @type distributor_id: str
    @param overrides: dictionary of options to pass to the publish manager
    @type overrides: dict or None
    @return: list of call requests
    @rtype: list
    """
    manager = manager_factory.repo_publish_manager()
    task_weight = pulp_config.config.getint('tasks', 'publish_weight')
    request_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                    action_tag('publish')]
    request = CallRequest(manager.publish,
                          [repo_id, distributor_id],
                          {'publish_config_override': overrides},
                          weight=task_weight,
                          tags=request_tags,
                          archive=True)
    # The publish mutates repository state, so declare the update operation.
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return [request]
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data):
    """
    Create a new scheduled publish for the given repository and distributor.

    @param repo_id: identifies the repository to publish
    @param distributor_id: identifies the distributor that will perform the publish
    @param publish_options: options passed through to the publish call; must
                            contain the key 'override_config'
    @param schedule_data: scheduling information passed to the scheduler; must
                          contain the key 'schedule'
    @return: id of the newly created schedule
    """
    # validate the input
    self._validate_distributor(repo_id, distributor_id)
    self._validate_keys(publish_options, _PUBLISH_OPTION_KEYS)
    if 'schedule' not in schedule_data:
        raise pulp_exceptions.MissingValue(['schedule'])

    # build the publish call
    publish_manager = managers_factory.repo_publish_manager()
    args = [repo_id, distributor_id]
    kwargs = {'publish_config_override': publish_options['override_config']}
    weight = pulp_config.config.getint('tasks', 'publish_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)]
    call_request = CallRequest(publish_manager.publish, args, kwargs, weight=weight, tags=tags,
                               archive=True)
    # The publish reads distributor config and updates repository state.
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                                distributor_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                         publish_manager.prep_publish)

    # schedule the publish
    scheduler = dispatch_factory.scheduler()
    schedule_id = scheduler.add(call_request, **schedule_data)
    # Record the schedule on the distributor so it can be listed/removed later.
    distributor_manager = managers_factory.repo_distributor_manager()
    distributor_manager.add_publish_schedule(repo_id, distributor_id, schedule_id)
    return schedule_id
def publish_itinerary(repo_id, distributor_id, overrides=None):
    """
    Create an itinerary for repo publish.

    @param repo_id: id of the repo to publish
    @type repo_id: str
    @param distributor_id: id of the distributor to use for the repo publish
    @type distributor_id: str
    @param overrides: dictionary of options to pass to the publish manager
    @type overrides: dict or None
    @return: list of call requests
    @rtype: list
    """
    publish_manager = manager_factory.repo_publish_manager()
    weight = pulp_config.config.getint('tasks', 'publish_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('publish')]
    publish_request = CallRequest(publish_manager.publish,
                                  [repo_id, distributor_id],
                                  {'publish_config_override': overrides},
                                  weight=weight,
                                  tags=tags,
                                  archive=True)
    publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return [publish_request]
def POST(self, repo_id):
    # TODO: Add timeout support

    # Params
    params = self.params()
    overrides = params.get("override_config", None)

    # Build the sync request to be executed asynchronously.
    sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint("tasks", "sync_weight")
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag("sync")]
    sync_request = CallRequest(sync_manager.sync,
                               [repo_id],
                               {"sync_config_override": overrides},
                               weight=sync_weight,
                               tags=sync_tags,
                               archive=True)
    sync_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    sync_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                         sync_manager.prep_sync)
    call_requests = [sync_request]

    # Chain a publish behind the sync for each auto-publish distributor.
    publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                         action_tag("auto_publish"),
                         action_tag("publish")]
    for distributor in publish_manager.auto_distributors(repo_id):
        publish_request = CallRequest(publish_manager.publish,
                                      [repo_id, distributor["id"]],
                                      tags=auto_publish_tags,
                                      archive=True)
        publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_request.add_life_cycle_callback(
            dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, publish_manager.prep_publish)
        publish_request.depends_on(sync_request)
        call_requests.append(publish_request)

    # this raises an exception that is handled by the middleware,
    # so no return is needed
    execution.execute_multiple(call_requests)
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Build the call request list that syncs a repository and then publishes
    every distributor configured for auto publish.

    @param repo_id: id of the repository to create a sync call request list for
    @type repo_id: str
    @param overrides: dictionary of configuration overrides for this sync
    @type overrides: dict or None
    @return: list of call request instances
    @rtype: list
    """
    sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('sync')]
    sync_request = CallRequest(sync_manager.sync,
                               [repo_id],
                               {'sync_config_override': overrides},
                               weight=sync_weight,
                               tags=sync_tags,
                               archive=True)
    sync_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_requests = [sync_request]

    publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                         action_tag('auto_publish'),
                         action_tag('publish')]
    # Each auto-publish distributor gets a publish request that waits for the
    # sync to reach the finished state.
    for distributor in publish_manager.auto_distributors(repo_id):
        publish_request = CallRequest(publish_manager.publish,
                                      [repo_id, distributor['id']],
                                      tags=auto_publish_tags,
                                      archive=True)
        publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_request.depends_on(sync_request.id,
                                   [dispatch_constants.CALL_FINISHED_STATE])
        call_requests.append(publish_request)

    return call_requests
def publish(repo_id, distributor_id, overrides=None):
    """
    Queue an asynchronous publish of the given repository using the given
    distributor.

    :param repo_id: id of the repo to publish
    :type repo_id: str
    :param distributor_id: id of the distributor to use for the repo publish
    :type distributor_id: str
    :param overrides: dictionary of options to pass to the publish manager
    :type overrides: dict or None
    :return: result of queueing the publish task (presumably an async task
             handle; TODO confirm against repo_publish_manager.queue_publish)
    """
    return managers.repo_publish_manager().queue_publish(repo_id, distributor_id, overrides)
def publish(repo_id, distributor_id, overrides=None):
    """
    Queue an asynchronous publish of the given repository using the given
    distributor.

    :param repo_id: id of the repo to publish
    :type repo_id: str
    :param distributor_id: id of the distributor to use for the repo publish
    :type distributor_id: str
    :param overrides: dictionary of options to pass to the publish manager
    :type overrides: dict or None
    :return: result of queueing the publish task (presumably an async task
             handle; TODO confirm against repo_publish_manager.queue_publish)
    """
    return managers.repo_publish_manager().queue_publish(
        repo_id, distributor_id, overrides)
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.

    @param repo_id: id of the repository to create a sync call request list for
    @type repo_id: str
    @param overrides: dictionary of configuration overrides for this sync
    @type overrides: dict or None
    @return: list of call request instances
    @rtype: list
    """
    # Build the sync request first; the publishes below depend on it.
    sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('sync')]
    sync_request = CallRequest(sync_manager.sync,
                               [repo_id],
                               {'sync_config_override': overrides},
                               weight=sync_weight,
                               tags=sync_tags,
                               archive=True)
    sync_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    sync_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                         sync_manager.prep_sync)
    call_requests = [sync_request]

    publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                         action_tag('auto_publish'),
                         action_tag('publish')]
    for distributor in publish_manager.auto_distributors(repo_id):
        publish_request = CallRequest(publish_manager.publish,
                                      [repo_id, distributor['id']],
                                      tags=auto_publish_tags,
                                      archive=True)
        publish_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_request.add_life_cycle_callback(
            dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, publish_manager.prep_publish)
        # Only run the publish once the sync has finished.
        publish_request.depends_on(sync_request.id,
                                   [dispatch_constants.CALL_FINISHED_STATE])
        call_requests.append(publish_request)

    return call_requests
def test_syntactic_sugar_methods(self):
    """
    Tests the syntactic sugar methods for retrieving specific managers.
    """
    # Test: each convenience accessor should return an instance of its
    # corresponding manager class.
    cases = (
        (factory.repo_manager, RepoManager),
        (factory.repo_unit_association_manager, RepoUnitAssociationManager),
        (factory.repo_publish_manager, RepoPublishManager),
        (factory.repo_query_manager, RepoQueryManager),
        (factory.repo_sync_manager, RepoSyncManager),
        (factory.content_manager, ContentManager),
        (factory.content_query_manager, ContentQueryManager),
        (factory.content_upload_manager, ContentUploadManager),
        (factory.consumer_manager, ConsumerManager),
    )
    for sugar_method, manager_class in cases:
        self.assertTrue(isinstance(sugar_method(), manager_class))
def GET(self, repo_id, distributor_id):
    """
    Return the publish history for the given repository and distributor,
    honoring an optional 'limit' query filter.
    """
    # Params -- the filter value arrives as a list of raw strings.
    filters = self.filters(['limit'])
    limit = filters.get('limit', None)
    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            _LOG.exception('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue(['limit'])

    publish_manager = manager_factory.repo_publish_manager()
    history_entries = publish_manager.publish_history(repo_id, distributor_id, limit=limit)
    return self.ok(history_entries)
def last_publish(self): """ Returns the timestamp of the last time this repo was published, regardless of the success or failure of the publish. If the repo was never published, this call returns None. @return: timestamp instance describing the last publish @rtype: datetime or None """ try: repo_publish_manager = manager_factory.repo_publish_manager() last = repo_publish_manager.last_publish(self.repo_id, self.distributor_id) return last except Exception, e: _LOG.exception('Error getting last publish time for repo [%s]' % self.repo_id) raise DistributorConduitException(e), None, sys.exc_info()[2]
def last_publish(self): """ Returns the timestamp of the last time this repo was published, regardless of the success or failure of the publish. If the repo was never published, this call returns None. @return: timestamp instance describing the last publish @rtype: datetime.datetime or None """ try: repo_publish_manager = manager_factory.repo_publish_manager() last = repo_publish_manager.last_publish(self.repo_id, self.distributor_id) return last except Exception, e: _LOG.exception('Error getting last publish time for repo [%s]' % self.repo_id) raise DistributorConduitException(e), None, sys.exc_info()[2]
def GET(self, repo_id, distributor_id):
    """
    Return the publish history for the given repository and distributor,
    honoring an optional 'limit' query filter.
    """
    # Params
    filter_values = self.filters(['limit'])
    limit = filter_values.get('limit', None)
    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            # A non-integer limit is a client error.
            _LOG.error('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue(['limit'])

    manager = manager_factory.repo_publish_manager()
    history = manager.publish_history(repo_id, distributor_id, limit=limit)
    return self.ok(history)
def POST(self, repo_id):
    """
    Kick off an asynchronous publish of the repository using the distributor
    named in the request body.
    """
    # Params
    body = self.params()
    distributor_id = body.get('id', None)
    overrides = body.get('override_config', None)

    # Execute the publish asynchronously
    publish_manager = manager_factory.repo_publish_manager()
    task_weight = pulp_config.config.getint('tasks', 'publish_weight')
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('publish')]
    request = CallRequest(publish_manager.publish,
                          [repo_id, distributor_id],
                          {'publish_config_override': overrides},
                          weight=task_weight,
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                    publish_manager.prep_publish)
    return execution.execute_async(self, request)
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data):
    """
    Create a new scheduled publish for the given repository and distributor.

    @param repo_id: identifies the repository to publish
    @param distributor_id: identifies the distributor that will perform the publish
    @param publish_options: options passed to the publish call; must contain
                            the key "override_config"
    @param schedule_data: scheduling information passed to the scheduler; must
                          contain the key "schedule"
    @return: id of the newly created schedule
    """
    # validate the input
    self._validate_distributor(repo_id, distributor_id)
    self._validate_keys(publish_options, _PUBLISH_OPTION_KEYS)
    if "schedule" not in schedule_data:
        raise pulp_exceptions.MissingValue(["schedule"])

    # build the publish call
    publish_manager = managers_factory.repo_publish_manager()
    call_args = [repo_id, distributor_id]
    call_kwargs = {"publish_config_override": publish_options["override_config"]}
    resources = {
        dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION},
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
            distributor_id: dispatch_constants.RESOURCE_READ_OPERATION},
    }
    weight = pulp_config.config.getint("tasks", "publish_weight")
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                         distributor_id)]
    call_request = CallRequest(publish_manager.publish, call_args, call_kwargs, resources, None,
                               weight, tags, archive=True)

    # schedule the publish
    scheduler = dispatch_factory.scheduler()
    schedule_id = scheduler.add(call_request, **schedule_data)
    # Record the schedule on the distributor so it can be listed/removed later.
    distributor_manager = managers_factory.repo_distributor_manager()
    distributor_manager.add_publish_schedule(repo_id, distributor_id, schedule_id)
    return schedule_id
def GET(self, repo_id, distributor_id):
    """
    Return the publish history for the given repository and distributor,
    honoring the optional limit/sort/start-date/end-date query filters.
    """
    # Params -- each filter value arrives as a list of raw strings.
    filters = self.filters([constants.REPO_HISTORY_FILTER_LIMIT,
                            constants.REPO_HISTORY_FILTER_SORT,
                            constants.REPO_HISTORY_FILTER_START_DATE,
                            constants.REPO_HISTORY_FILTER_END_DATE])
    limit = filters.get(constants.REPO_HISTORY_FILTER_LIMIT, None)
    sort = filters.get(constants.REPO_HISTORY_FILTER_SORT, None)
    start_date = filters.get(constants.REPO_HISTORY_FILTER_START_DATE, None)
    end_date = filters.get(constants.REPO_HISTORY_FILTER_END_DATE, None)

    if limit is not None:
        try:
            limit = int(limit[0])
        except ValueError:
            _logger.error('Invalid limit specified [%s]' % limit)
            raise exceptions.InvalidValue([constants.REPO_HISTORY_FILTER_LIMIT])

    # Default the sort direction when the caller omitted it.
    sort = constants.SORT_DESCENDING if sort is None else sort[0]
    start_date = start_date[0] if start_date else start_date
    end_date = end_date[0] if end_date else end_date

    manager = manager_factory.repo_publish_manager()
    history = manager.publish_history(repo_id, distributor_id, limit=limit, sort=sort,
                                      start_date=start_date, end_date=end_date)
    return self.ok(history)
def sync(repo_id, sync_config_override=None):
    """
    Performs a synchronize operation on the given repository.

    The given repo must have an importer configured. The identity of the
    importer is not a parameter to this call; if multiple importers are
    eventually supported this will have to change to indicate which importer
    to use.

    This method is intentionally limited to synchronizing a single repo.
    Performing multiple repository syncs concurrently will require a more
    global view of the server and must be handled outside the scope of this
    class.

    @param repo_id: identifies the repo to sync
    @type repo_id: str
    @param sync_config_override: optional config containing values to use for this sync only
    @type sync_config_override: dict
    @return: The synchronization report.
    @rtype: L{pulp.server.plugins.model.SyncReport}
    @raise MissingResource: if repo_id does not refer to a valid repo
    @raise OperationFailed: if the given repo does not have an importer set
    """
    repo_coll = Repo.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)
    importer_instance, importer_config = RepoSyncManager._get_importer_instance_and_config(
        repo_id)
    if importer_instance is None:
        raise MissingResource(repo_id)
    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    # Assemble the data needed for the sync
    conduit = RepoSyncConduit(repo_id, repo_importer['id'])
    call_config = PluginCallConfiguration(importer_config, repo_importer['config'],
                                          sync_config_override)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.get_working_directory()

    # Fire events around the sync call itself.
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_sync_started(repo_id)
    sync_result = RepoSyncManager._do_sync(repo, importer_instance, transfer_repo, conduit,
                                           call_config)
    fire_manager.fire_repo_sync_finished(sync_result)

    if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
        raise PulpExecutionException(_('Importer indicated a failed response'))

    # Queue a publish for every distributor configured for auto publish.
    publish_manager = manager_factory.repo_publish_manager()
    spawned_tasks = [publish_manager.queue_publish(repo_id, distributor['id']).task_id
                     for distributor in publish_manager.auto_distributors(repo_id)]
    return TaskResult(sync_result, spawned_tasks=spawned_tasks)
def sync(repo_id, sync_config_override=None):
    """
    Performs a synchronize operation on the given repository.

    The given repo must have an importer configured. The identity of the
    importer is not a parameter to this call; if multiple importers are
    eventually supported this will have to change to indicate which importer
    to use.

    This method is intentionally limited to synchronizing a single repo.
    Performing multiple repository syncs concurrently will require a more
    global view of the server and must be handled outside the scope of this
    class.

    @param repo_id: identifies the repo to sync
    @type repo_id: str
    @param sync_config_override: optional config containing values to use for this sync only
    @type sync_config_override: dict
    @return: The synchronization report.
    @rtype: L{pulp.server.plugins.model.SyncReport}
    @raise MissingResource: if repo_id does not refer to a valid repo
    @raise OperationFailed: if the given repo does not have an importer set
    """
    repo_coll = Repo.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)
    importer_instance, importer_config = RepoSyncManager._get_importer_instance_and_config(
        repo_id)
    if importer_instance is None:
        raise MissingResource(repo_id)
    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    # Assemble the data needed for the sync
    conduit = RepoSyncConduit(repo_id, repo_importer['id'],
                              RepoContentUnit.OWNER_TYPE_IMPORTER, repo_importer['id'])
    call_config = PluginCallConfiguration(importer_config, repo_importer['config'],
                                          sync_config_override)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.get_working_directory()

    # Fire events around the sync call itself.
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_sync_started(repo_id)
    sync_result = RepoSyncManager._do_sync(repo, importer_instance, transfer_repo, conduit,
                                           call_config)
    fire_manager.fire_repo_sync_finished(sync_result)

    if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
        raise PulpExecutionException(_('Importer indicated a failed response'))

    # Queue a publish for every distributor configured for auto publish.
    publish_manager = manager_factory.repo_publish_manager()
    spawned_tasks = [publish_manager.queue_publish(repo_id, distributor['id']).task_id
                     for distributor in publish_manager.auto_distributors(repo_id)]
    return TaskResult(sync_result, spawned_tasks=spawned_tasks)