def sync_with_auto_publish(repo_id, overrides=None):
    """
    Sync a repository and, upon successful completion, publish any distributors
    that are configured for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id: str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides: dict or None
    :return: A task result containing the details of the task executed and any
             spawned tasks
    :rtype: TaskResult
    """
    sync_report = managers.repo_sync_manager().sync(repo_id, sync_config_override=overrides)
    task_result = TaskResult(sync_report)

    # Queue a publish for every distributor marked for auto publish.
    publish_manager = managers.repo_publish_manager()
    task_result.spawned_tasks = [
        publish(repo_id, dist['id'])
        for dist in publish_manager.auto_distributors(repo_id)
    ]
    return task_result
def test_distributor_update_with_bindings(self, mock_dist_manager, mock_bind_manager, mock_bind):
    """
    Updating a distributor that has consumer bindings should re-bind each
    consumer and surface the spawned task in the result.
    """
    updated_distributor = {'foo': 'bar'}
    mock_dist_manager.return_value.update_distributor_config.return_value = updated_distributor
    binding = {
        'consumer_id': 'foo',
        'repo_id': 'repo-foo',
        'distributor_id': 'dist-id',
        'notify_agent': True,
        'binding_config': {'conf': 'baz'},
    }
    mock_bind_manager.return_value.find_by_distributor.return_value = [binding]
    mock_bind.return_value = TaskResult(spawned_tasks=[{'task_id': 'foo-request-id'}])

    result = repository.distributor_update('foo-id', 'bar-id', {}, None)

    self.assertEquals(None, result.error)
    mock_bind.assert_called_once_with('foo', 'repo-foo', 'dist-id', True, {'conf': 'baz'}, ANY)
    self.assertEquals(result.spawned_tasks[0], {'task_id': 'foo-request-id'})
def update_repo_and_plugins(repo, repo_delta, importer_config, distributor_configs):
    """
    Update a repository and its related collections.

    All details do not need to be specified; if a piece is omitted it's configuration
    is not touched, nor is it removed from the repository. The same holds true for the
    distributor_configs dict, not every distributor must be represented.

    This call will attempt to update the repository object, then the importer, then the
    distributors. If an exception occurs during any of these steps, the updates stop and
    the exception is immediately raised. Any updates that have already taken place are
    not rolled back.

    Distributor updates are asynchronous as there could be a very large number of
    consumers to update. Repository and importer updates are done synchronously.

    :param repo: repository object
    :type  repo: pulp.server.db.model.Repository
    :param repo_delta: list of attributes to change and their new values; if None,
                       no attempt to update the repository object will be made
    :type  repo_delta: dict, None
    :param importer_config: new configuration to use for the repo's importer; if None,
                            no attempt will be made to update the importer
    :type  importer_config: dict, None
    :param distributor_configs: mapping of distributor ID to the new configuration to
                                set for it
    :type  distributor_configs: dict, None

    :return: Task result that contains the updated repository object and additional
             spawned tasks
    :rtype:  pulp.server.async.tasks.TaskResult

    :raises pulp_exceptions.PulpCodedValidationException: if repo_delta is not a
        dictionary (PLP1010)
    """
    # Synchronous: apply the metadata delta to the repository document first.
    if repo_delta:
        if isinstance(repo_delta, dict):
            repo.update_from_delta(repo_delta)
            repo.save()
        else:
            # Docstring previously claimed InvalidValue; the code actually raises
            # a coded validation exception, so the docs above were corrected.
            raise pulp_exceptions.PulpCodedValidationException(
                error_code=error_codes.PLP1010, field='delta', field_type='dict',
                value=repo_delta)

    # Synchronous: update the importer configuration, if one was provided.
    if importer_config is not None:
        importer_controller.update_importer_config(repo.repo_id, importer_config)

    # Asynchronous: dispatch one reserved task per distributor to update.
    additional_tasks = []
    if distributor_configs is not None:
        for dist_id, dist_config in distributor_configs.items():
            task_tags = [
                tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo.repo_id),
                tags.resource_tag(tags.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, dist_id),
                tags.action_tag(tags.ACTION_UPDATE_DISTRIBUTOR)
            ]
            async_result = dist_controller.update.apply_async_with_reservation(
                tags.RESOURCE_REPOSITORY_TYPE, repo.repo_id,
                [repo.repo_id, dist_id, dist_config, None], tags=task_tags)
            additional_tasks.append(async_result)

    return TaskResult(repo, None, additional_tasks)
def _get_operation_postponed_body(exception):
    """
    Build the JSON body for a postponed-operation response.

    :param exception: exception carrying the call report of the postponed call
    :type exception: pulp.server.exceptions.OperationPostponed
    :return: JSON-serialized call report with an href added to each spawned task
    :rtype: str
    """
    call_report = exception.call_report
    # Celery AsyncResults must first be wrapped in a TaskResult to be serializable.
    if isinstance(call_report, AsyncResult):
        call_report = TaskResult.from_async_result(call_report)
    serialized = call_report.serialize()
    for spawned in serialized['spawned_tasks']:
        spawned.update(serialization.dispatch.task_result_href(spawned))
    return json.dumps(serialized, default=json_util.default)
def _get_operation_postponed_body(exception):
    """
    Serialize the call report of a postponed operation to a JSON body.

    :param exception: exception carrying the call report of the postponed call
    :type exception: pulp.server.exceptions.OperationPostponed
    :return: JSON-serialized call report with an href added to each spawned task
    :rtype: str
    """
    report = exception.call_report
    if isinstance(report, AsyncResult):
        # Normalize a raw Celery AsyncResult into a TaskResult for serialization.
        report = TaskResult.from_async_result(report)
    body = report.serialize()
    for spawned_task in body['spawned_tasks']:
        spawned_task.update(dispatch.task_result_href(spawned_task))
    return json.dumps(body, default=json_util.default)
def uninstall(self, consumer_id):
    """
    Uninstall content (units) on a consumer.
    Expected body: {units:[], options:<dict>}
    where unit is: {type_id:<str>, unit_key={}} and the options is a dict
    of uninstall options.
    @param consumer_id: A consumer ID.
    @type consumer_id: str
    """
    request_body = self.params()
    units = request_body.get('units')
    options = request_body.get('options')
    # Hand the work to the agent and report the resulting task asynchronously.
    task = managers.consumer_agent_manager().uninstall_content(consumer_id, units, options)
    raise OperationPostponed(TaskResult.from_task_status_dict(task))
def test_bind_no_errors(self, mock_query_manager, mock_unbind):
    """
    Unbinding a group with a single consumer forwards the call per consumer
    and reports the spawned task.
    """
    mock_query_manager.return_value.get_group.return_value = {'consumer_ids': ['foo-consumer']}
    agent_options = {'bar': 'baz'}
    mock_unbind.return_value = TaskResult(spawned_tasks=[{'task_id': 'foo-request-id'}])

    result = consumer_group.unbind('foo_group_id', 'foo_repo_id', 'foo_distributor_id',
                                   agent_options)

    mock_unbind.assert_called_once_with('foo-consumer', 'foo_repo_id', 'foo_distributor_id',
                                        agent_options)
    self.assertEquals(result.spawned_tasks[0], {'task_id': 'foo-request-id'})
def _get_operation_postponed_body(exception):
    """
    Serialize a postponed operation's call report to a JSON response body.

    :param exception: exception carrying the call report of the postponed call
    :type exception: pulp.server.exceptions.OperationPostponed
    :return: JSON-serialized call report; spawned tasks gain hrefs, and a
             Mongoengine Document result is serialized via its own serializer
    :rtype: str
    """
    report = exception.call_report
    if isinstance(report, AsyncResult):
        report = TaskResult.from_async_result(report)
    body = report.serialize()
    for spawned in body['spawned_tasks']:
        spawned.update(dispatch.task_result_href(spawned))
    # Use the object's serializer if it is a Mongoengine Document.
    result = body.get('result')
    if hasattr(result, 'serializer'):
        body['result'] = result.serializer(result).data
    return json.dumps(body, default=json_util.default)
def _get_operation_postponed_body(exception):
    """
    Build the JSON body for a postponed-operation response.

    :param exception: exception carrying the call report of the postponed call
    :type exception: pulp.server.exceptions.OperationPostponed
    :return: JSON-serialized call report; spawned tasks (when present) gain
             hrefs, and a Mongoengine Document result is serialized via its
             own serializer
    :rtype: str
    """
    call_report = exception.call_report
    if isinstance(call_report, AsyncResult):
        call_report = TaskResult.from_async_result(call_report)
    serialized = call_report.serialize()
    # Some reports carry no 'spawned_tasks' key at all; only decorate when present.
    for spawned in serialized.get('spawned_tasks', []):
        spawned.update(dispatch.task_result_href(spawned))
    # Use the object's serializer if it is a Mongoengine Document.
    result = serialized.get('result')
    if hasattr(result, 'serializer'):
        serialized['result'] = result.serializer(result).data
    return json.dumps(serialized, default=json_util.default)
def test_delete_with_bindings(self, mock_repo_manager, mock_bind_manager, mock_unbind):
    """
    Deleting a repo that has bindings unbinds each consumer and reports the
    spawned task.
    """
    binding = {'consumer_id': 'foo', 'repo_id': 'repo-foo', 'distributor_id': 'dist-id'}
    mock_bind_manager.return_value.find_by_repo.return_value = [binding]
    mock_unbind.return_value = TaskResult(spawned_tasks=[{'task_id': 'foo-request-id'}])

    result = repository.delete('foo-repo')

    mock_unbind.assert_called_once_with('foo', 'repo-foo', 'dist-id', ANY)
    self.assertEquals(result.spawned_tasks[0], {'task_id': 'foo-request-id'})
def unbind(consumer_id, repo_id, distributor_id, options):
    """
    Unbind a consumer.

    The itinerary is:
      1. Unbind the consumer from the repo on the server
         (mark the binding on the server as deleted).
      2. Request that the consumer (agent) perform the unbind.
      3. Delete the binding on the server.

    :param consumer_id: A consumer ID.
    :type consumer_id: str
    :param repo_id: A repository ID.
    :type repo_id: str
    :param distributor_id: A distributor ID.
    :type distributor_id: str
    :param options: Unbind options passed to the agent handler.
    :type options: dict
    :returns: TaskResult containing the result of the unbind & any spawned tasks
              or a dictionary of the unbind result if no tasks were spawned.
    :rtype: TaskResult
    """
    bind_manager = managers.consumer_bind_manager()
    binding = bind_manager.get_bind(consumer_id, repo_id, distributor_id)
    task_result = TaskResult(result=binding)

    if not binding['notify_agent']:
        # No agent notification required; remove the binding immediately.
        bind_manager.delete(consumer_id, repo_id, distributor_id, True)
        return task_result

    # Mark the binding deleted on the server, then ask the agent to unbind;
    # the agent notification handler deletes the binding once confirmed.
    bind_manager.unbind(consumer_id, repo_id, distributor_id)
    agent_task = managers.consumer_agent_manager().unbind(
        consumer_id, repo_id, distributor_id, options)
    # Only the task's ID is recorded, not the full task document.
    task_result.spawned_tasks.append({'task_id': agent_task['task_id']})
    return task_result
def __call__(self, environ, start_response):
    """
    WSGI entry point: invoke the wrapped app and translate an OperationPostponed
    exception into a response whose body is the serialized call report.

    :param environ: WSGI environment dictionary
    :param start_response: WSGI start_response callable
    :return: iterable of response body strings
    """
    try:
        return self.app(environ, start_response)
    # Fixed: 'except X, e' is Python-2-only comma syntax; 'except X as e'
    # behaves identically and is valid on Python 2.6+ and Python 3.
    except OperationPostponed as e:
        report = e.call_report
        if isinstance(e.call_report, AsyncResult):
            report = TaskResult.from_async_result(e.call_report)
        serialized_call_report = report.serialize()
        # Attach an href to each spawned task so clients can poll its status.
        for task in serialized_call_report['spawned_tasks']:
            href_obj = serialization.dispatch.task_result_href(task)
            task.update(href_obj)
        body = json.dumps(serialized_call_report, default=json_util.default)
        self.headers['Content-Length'] = str(len(body))
        start_str = '%d %s' % (e.http_status_code, http_responses[e.http_status_code])
        start_response(start_str, [(k, v) for k, v in self.headers.items()])
        return [body]
def bind(consumer_id, repo_id, distributor_id, notify_agent, binding_config, agent_options):
    """
    Bind a repo to a consumer:
      1. Create the binding on the server.
      2. Request that the consumer (agent) perform the bind.

    :param consumer_id: A consumer ID.
    :type consumer_id: str
    :param repo_id: A repository ID.
    :type repo_id: str
    :param distributor_id: A distributor ID.
    :type distributor_id: str
    :param notify_agent: indicates if the agent should be sent a message about
                         the new binding
    :type notify_agent: bool
    :param binding_config: configuration options to use when generating the
                           payload for this binding
    :param agent_options: Bind options passed to the agent handler.
    :type agent_options: dict
    :returns: TaskResult containing the result of the bind & any spawned tasks
              or a dictionary of the bind result if no tasks were spawned.
    :rtype: TaskResult
    :raises pulp.server.exceptions.MissingResource: when given consumer does not exist
    """
    # Create the binding on the server first.
    binding = managers.consumer_bind_manager().bind(
        consumer_id, repo_id, distributor_id, notify_agent, binding_config)
    task_result = TaskResult(result=binding)

    if notify_agent:
        # Ask the agent to apply the binding; record only the task's ID.
        agent_task = managers.consumer_agent_manager().bind(
            consumer_id, repo_id, distributor_id, agent_options)
        task_result.spawned_tasks.append({'task_id': agent_task['task_id']})

    return task_result
def uninstall(self, request, consumer_id, units, options):
    """
    Uninstall content (units) on a consumer.
    Expected body: {units:[], options:<dict>}
    where unit is: {type_id:<str>, unit_key={}} and the options is a dict
    of uninstall options.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param consumer_id: A consumer ID.
    :type consumer_id: str
    :param units: units to uninstall
    :type units: list
    :param options: uninstall options
    :type options: dict
    :raises OperationPostponed: when an async operation is performed.
    """
    # Delegate to the agent and surface the resulting task asynchronously.
    task = factory.consumer_agent_manager().uninstall_content(consumer_id, units, options)
    raise OperationPostponed(TaskResult.from_task_status_dict(task))
def force_unbind(consumer_id, repo_id, distributor_id, options):
    """
    Get the unbind itinerary.

    A forced unbind immediately deletes the binding instead of marking it
    deleted and going through that lifecycle. It is intended to be used to
    clean up orphaned bindings caused by failed/unconfirmed unbind actions
    on the consumer.

    The itinerary is:
      1. Delete the binding on the server.
      2. Request that the consumer (agent) perform the unbind.

    :param consumer_id: A consumer ID.
    :type consumer_id: str
    :param repo_id: A repository ID.
    :type repo_id: str
    :param distributor_id: A distributor ID.
    :type distributor_id: str
    :param options: Unbind options passed to the agent handler.
    :type options: dict
    :returns: TaskResult containing the result of the unbind & any spawned tasks
              or a dictionary of the unbind result if no tasks were spawned.
    :rtype: TaskResult
    """
    bind_manager = managers.consumer_bind_manager()
    binding = bind_manager.get_bind(consumer_id, repo_id, distributor_id)
    # Delete the binding up front rather than marking it for deletion.
    bind_manager.delete(consumer_id, repo_id, distributor_id, True)

    task_result = TaskResult()
    if binding['notify_agent']:
        agent_task = managers.consumer_agent_manager().unbind(
            consumer_id, repo_id, distributor_id, options)
        # Only the task's ID is recorded, not the full task document.
        task_result.spawned_tasks.append({'task_id': agent_task['task_id']})
    return task_result
def update(self, consumer_id):
    """
    Update content (units) on a consumer.
    Expected body: {units:[], options:<dict>}
    where unit is: {type_id:<str>, unit_key={}} and the options is a dict
    of update options.
    @param consumer_id: A consumer ID.
    @type consumer_id: str
    """
    body = self.params()
    units = body.get('units')
    options = body.get('options')
    # Both 'units' and 'options' are required; collect every absent field so
    # the error names all of them at once.
    missing_params = [name for name, value in (('units', units), ('options', options))
                      if value is None]
    if missing_params:
        raise MissingValue(missing_params)
    task = managers.consumer_agent_manager().update_content(consumer_id, units, options)
    raise OperationPostponed(TaskResult.from_task_status_dict(task))
logger.debug(e.message) bind_errors.append(e) except Exception, e: logger.exception(e.message) # Don't do anything else since we still want to process all the other consumers bind_errors.append(e) bind_error = None if len(bind_errors) > 0: bind_error = PulpCodedException(error_codes.PLP0004, repo_id=repo_id, distributor_id=distributor_id, group_id=group_id) bind_error.child_exceptions = bind_errors return TaskResult(error=bind_error, spawned_tasks=additional_tasks) @staticmethod def unbind(group_id, repo_id, distributor_id, options): """ Unbind the members of the specified consumer group. :param group_id: A consumer group ID. :type group_id: str :param repo_id: A repository ID. :type repo_id: str :param distributor_id: A distributor ID. :type distributor_id: str :param options: Bind options passed to the agent handler. :type options: dict :return: TaskResult containing the ids of all the spawned tasks & bind errors :rtype: TaskResult
def sync(repo_id, sync_config_override=None):
    """
    Performs a synchronize operation on the given repository.

    The given repo must have an importer configured. The identity of the
    importer is not a parameter to this call; if multiple importers are
    eventually supported this will have to change to indicate which
    importer to use.

    This method is intentionally limited to synchronizing a single repo.
    Performing multiple repository syncs concurrently will require a more
    global view of the server and must be handled outside the scope of this
    class.

    @param repo_id: identifies the repo to sync
    @type repo_id: str

    @param sync_config_override: optional config containing values to use for this
                                 sync only
    @type sync_config_override: dict

    @return: The synchronization report.
    @rtype: L{pulp.server.plugins.model.SyncReport}

    @raise MissingResource: if repo_id does not refer to a valid repo
    @raise OperationFailed: if the given repo does not have an importer set
    """
    repo_coll = Repo.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)

    importer_instance, importer_config = RepoSyncManager._get_importer_instance_and_config(
        repo_id)

    # No importer plugin resolved for this repo -> treat as a missing resource.
    if importer_instance is None:
        raise MissingResource(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    # Assemble the data needed for the sync: a conduit for the plugin to talk
    # back through, the merged call configuration (plugin defaults, stored repo
    # importer config, then per-call overrides), and a transfer representation
    # of the repo with a working directory.
    conduit = RepoSyncConduit(repo_id, repo_importer['id'])
    call_config = PluginCallConfiguration(importer_config, repo_importer['config'],
                                          sync_config_override)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.get_working_directory()

    # Fire an events around the call
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_sync_started(repo_id)
    sync_result = RepoSyncManager._do_sync(repo, importer_instance, transfer_repo, conduit,
                                           call_config)
    fire_manager.fire_repo_sync_finished(sync_result)

    # Surface an importer-reported failure as an execution exception.
    if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
        raise PulpExecutionException(_('Importer indicated a failed response'))

    # Queue a publish task for every distributor configured for auto publish;
    # only the task ids are collected into the returned TaskResult.
    repo_publish_manager = manager_factory.repo_publish_manager()
    auto_distributors = repo_publish_manager.auto_distributors(repo_id)
    spawned_tasks = []
    for distributor in auto_distributors:
        distributor_id = distributor['id']
        spawned_tasks.append(
            repo_publish_manager.queue_publish(repo_id, distributor_id).task_id)

    return TaskResult(sync_result, spawned_tasks=spawned_tasks)
for bind in bind_manager.find_by_distributor(repo_id, dist_id): try: report = bind_manager.unbind(bind['consumer_id'], bind['repo_id'], bind['distributor_id'], options) if report: additional_tasks.extend(report.spawned_tasks) except Exception, e: unbind_errors.append(e) bind_error = None if unbind_errors: bind_error = exceptions.PulpCodedException(PLP0003, repo_id=repo_id, distributor_id=dist_id) bind_error.child_exceptions = unbind_errors return TaskResult(error=bind_error, spawned_tasks=additional_tasks) def queue_update(distributor, config, delta): """ Dispatch a task to update a distributor. :param distributor: distributor to be updated :type distributor: pulp.server.db.model.Distributor :param config: A configuration dictionary for a distributor instance. The contents of this dict depends on the type of distributor. Values of None will remove they key from the config. Keys ommited from this dictionary will remain unchanged. :type config: dict :param delta: A dictionary used to change conf values for a distributor instance. This currently only supports the 'auto_publish' keyword, which should have a value of type bool :type delta: dict or None
report = consumer_controller.unbind(bind['consumer_id'], bind['repo_id'], bind['distributor_id'], options) if report: additional_tasks.extend(report.spawned_tasks) except Exception, e: errors.append(e) error = None if len(errors) > 0: error = pulp_exceptions.PulpCodedException(error_codes.PLP0007, repo_id=repo_id) error.child_exceptions = errors return TaskResult(error=error, spawned_tasks=additional_tasks) def update_repo_and_plugins(repo, repo_delta, importer_config, distributor_configs): """ Update a reposiory and its related collections. All details do not need to be specified; if a piece is omitted it's configuration is not touched, nor is it removed from the repository. The same holds true for the distributor_configs dict, not every distributor must be represented. This call will attempt to update the repository object, then the importer, then the distributors. If an exception occurs during any of these steps, the updates stop and the exception is immediately raised. Any updates that have already taken place are not rolled back.
try: report = bind_manager.unbind(bind['consumer_id'], bind['repo_id'], bind['distributor_id'], options) if report: additional_tasks.extend(report.spawned_tasks) except Exception, e: unbind_errors.append(e) bind_error = None if unbind_errors: bind_error = exceptions.PulpCodedException(PLP0003, repo_id=repo_id, distributor_id=dist_id) bind_error.child_exceptions = unbind_errors return TaskResult(error=bind_error, spawned_tasks=additional_tasks) def queue_update(distributor, config, delta): """ Dispatch a task to update a distributor. :param distributor: distributor to be updated :type distributor: pulp.server.db.model.Distributor :param config: A configuration dictionary for a distributor instance. The contents of this dict depends on the type of distributor. Values of None will remove they key from the config. Keys ommited from this dictionary will remain unchanged. :type config: dict :param delta: A dictionary used to change conf values for a distributor instance. This currently only supports the 'auto_publish' keyword, which should have a value of type bool :type delta: dict or None
def update_repo_and_plugins(repo_id, repo_delta, importer_config, distributor_configs):
    """
    Aggregate method that will update one or more of the following:
    * Repository metadata
    * Importer config
    * Zero or more distributors on the repository

    All of the above pieces do not need to be specified. If a piece is omitted
    it's configuration is not touched, nor is it removed from the repository.
    The same holds true for the distributor_configs dict, not every distributor
    must be represented.

    This call will attempt the updates in the order listed above. If an
    exception occurs during any of these steps, the updates stop and the
    exception is immediately raised. Any updates that have already taken place
    are not rolled back.

    This call will call out to RepoImporterManager.update_importer_config.
    Documentation for that method, especially possible exceptions, should be
    consulted for more information.

    Distributor updates will happen asynchronously as there could be a very
    large number of consumers to update and the repo update call is usually
    made synchronously.

    :param repo_id: unique identifier for the repo
    :type repo_id: str
    :param repo_delta: list of attributes and their new values to change; if
                       None, no attempt to update the repo's metadata will be
                       made
    :type repo_delta: dict, None
    :param importer_config: new configuration to use for the repo's importer;
                            if None, no attempt will be made to update the
                            importer
    :type importer_config: dict, None
    :param distributor_configs: mapping of distributor ID to the new
                                configuration to set for it
    :type distributor_configs: dict, None
    :return: updated repository object, same as returned from update_repo
    :rtype: TaskResult
    """
    # Repository metadata update (synchronous); a None delta means "no changes".
    delta = repo_delta if repo_delta is not None else {}
    repo = RepoManager.update_repo(repo_id, delta)

    # Importer update (synchronous).
    if importer_config is not None:
        manager_factory.repo_importer_manager().update_importer_config(repo_id,
                                                                       importer_config)

    # Distributor updates (asynchronous): one reserved task per distributor.
    additional_tasks = []
    if distributor_configs is not None:
        for dist_id, dist_config in distributor_configs.items():
            update_tags = [
                tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                tags.resource_tag(tags.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, dist_id),
                tags.action_tag(tags.ACTION_UPDATE_DISTRIBUTOR),
            ]
            additional_tasks.append(
                repository.distributor_update.apply_async_with_reservation(
                    tags.RESOURCE_REPOSITORY_TYPE, repo_id,
                    [repo_id, dist_id, dist_config, None], tags=update_tags))

    return TaskResult(repo, None, additional_tasks)