def publish(repo_id, distributor_id, publish_config_override=None):
    """
    Requests the given distributor publish the repository it is configured on.

    The publish operation is executed synchronously in the caller's thread and
    will block until it is completed. The caller must take the necessary steps
    to address the fact that a publish call may be time intensive.

    @param repo_id: identifies the repo being published
    @type repo_id: str

    @param distributor_id: identifies the repo's distributor to publish
    @type distributor_id: str

    @param publish_config_override: optional config values to use for this
                                    publish call only
    @type publish_config_override: dict, None

    :return: report of the details of the publish
    :rtype: pulp.server.db.model.repository.RepoPublishResult

    :raise MissingResource: if the repo or its distributor does not exist
    """
    repo_coll = Repo.get_collection()
    distributor_coll = RepoDistributor.get_collection()

    # Validation: both the repo and its distributor must exist before any
    # plugin machinery is touched.
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repo_id)
    repo_distributor = distributor_coll.find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(repository=repo_id, distributor=distributor_id)
    distributor_instance, distributor_config = RepoPublishManager.\
        _get_distributor_instance_and_config(repo_id, distributor_id)

    # Assemble the data needed for the publish: the conduit gives the plugin
    # its callback interface and the call config layers the per-call override
    # on top of the stored distributor configuration.
    conduit = RepoPublishConduit(repo_id, distributor_id)
    call_config = PluginCallConfiguration(distributor_config,
                                          repo_distributor['config'],
                                          publish_config_override)
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.get_working_directory()

    # Fire events describing the publish state around the actual publish call.
    fire_manager = manager_factory.event_fire_manager()
    fire_manager.fire_repo_publish_started(repo_id, distributor_id)
    result = RepoPublishManager._do_publish(repo, distributor_id,
                                            distributor_instance, transfer_repo,
                                            conduit, call_config)
    fire_manager.fire_repo_publish_finished(result)

    return result
def get_bind(consumer_id, repo_id, distributor_id):
    """
    Fetch a single bind document; the deleted flag is ignored.

    :param consumer_id: uniquely identifies the consumer.
    :type consumer_id: str
    :param repo_id: uniquely identifies the repository.
    :type repo_id: str
    :param distributor_id: uniquely identifies a distributor.
    :type distributor_id: str
    :return: A specific bind.
    :rtype: SON
    :raise MissingResource: if the binding doesn't exist
    """
    bind_id = BindManager.bind_id(consumer_id, repo_id, distributor_id)
    document = Bind.get_collection().find_one(bind_id)
    if document is not None:
        return document

    # No binding found: determine whether one of the referenced resources is
    # missing, or whether everything exists but simply isn't bound.
    absent = BindManager._validate_consumer_repo(consumer_id, repo_id,
                                                 distributor_id)
    if absent:
        raise MissingResource(**absent)
    raise MissingResource(bind_id=bind_id)
def add_user_to_role(self, role_id, login):
    """
    Add a user to a role. This has the side-effect of granting all the
    permissions granted to the role to the user.

    @type role_id: str
    @param role_id: role identifier

    @type login: str
    @param login: login of user

    @raise MissingResource: if the given role or user does not exist
    """
    role = Role.get_collection().find_one({'id': role_id})
    if role is None:
        raise MissingResource(role_id)
    user = User.get_collection().find_one({'login': login})
    if user is None:
        raise MissingResource(login)
    if role_id in user['roles']:
        # The user already holds the role; nothing to add or grant.
        return
    user['roles'].append(role_id)
    User.get_collection().save(user, safe=True)
    # Grant every permission the role carries to the newly added member.
    for resource, operations in role['permissions'].items():
        factory.permission_manager().grant(resource, login, operations)
def get(self, event_listener_id): """ Retrieves the given event listener if it exists. If not, an exception is raised. @param event_listener_id: listener to retrieve @type event_listener_id: str @return: listener instance from the database @rtype: dict @raise MissingResource: if no listener exists at the given ID """ collection = EventListener.get_collection() try: id = ObjectId(event_listener_id) except InvalidId: raise MissingResource( event_listener=event_listener_id), None, sys.exc_info()[2] listener = collection.find_one({'_id': id}) if listener is None: raise MissingResource(event_listener=event_listener_id) else: return listener
def remove_distributor(repo_id, distributor_id):
    """
    Removes a distributor from a repository.

    @param repo_id: identifies the repo
    @type repo_id: str

    @param distributor_id: identifies the distributor to delete
    @type distributor_id: str

    @raise MissingResource: if repo_id doesn't correspond to a valid repo
    @raise MissingResource: if there is no distributor with the given ID
    """
    repo_coll = Repo.get_collection()
    distributor_coll = RepoDistributor.get_collection()

    # Validation
    repo = repo_coll.find_one({'id': repo_id})
    if repo is None:
        raise MissingResource(repository=repo_id)
    repo_distributor = distributor_coll.find_one(
        {'repo_id': repo_id, 'id': distributor_id})
    if repo_distributor is None:
        raise MissingResource(distributor=distributor_id)

    # Remove publish schedules owned by this distributor before it goes away.
    RepoPublishScheduleManager().delete_by_distributor_id(
        repo_id, repo_distributor['id'])

    # Call the distributor's cleanup method so the plugin can release any
    # resources it holds for this repo before the record is deleted.
    distributor_type_id = repo_distributor['distributor_type_id']
    distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
        distributor_type_id)
    call_config = PluginCallConfiguration(plugin_config,
                                          repo_distributor['config'])
    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.distributor_working_dir(
        distributor_type_id, repo_id)
    distributor_instance.distributor_removed(transfer_repo, call_config)

    # Update the database to reflect the removal
    distributor_coll.remove({'_id': repo_distributor['_id']}, safe=True)
def get(self, request, consumer_id, schedule_id):
    """
    List a specific schedule <action>.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param consumer_id: The consumer ID.
    :type consumer_id: str
    :param schedule_id: the schedule id
    :type schedule_id: str

    :raises MissingResource: if consumer/schedule does not exist
    :return: Response containing consumer's schedule <action>
    :rtype: django.http.HttpResponse
    """
    # Pull the first schedule whose id matches; None when no match exists.
    matches = (call for call in self.manager.get(consumer_id, self.ACTION)
               if call.id == schedule_id)
    scheduled_call = next(matches, None)

    if scheduled_call is None:
        raise MissingResource(consumer_id=consumer_id,
                              schedule_id=schedule_id)

    scheduled_obj = scheduled_unit_management_obj(
        scheduled_call.for_display())
    add_link_schedule(scheduled_obj, self.ACTION, consumer_id)
    return generate_json_response_with_pulp_encoder(scheduled_obj)
def save_data(self, upload_id, offset, data):
    """
    Saves bits into the given upload request starting at an offset value.

    The initialize_upload method should be called prior to this method to
    retrieve the upload_id value and perform any steps necessary before bits
    can be saved.

    @param upload_id: upload request ID
    @type upload_id: str

    @param offset: area in the uploaded file to start writing at
    @type offset: int

    @param data: content to write to the file
    @type data: str

    @raise MissingResource: if the upload was never initialized or was deleted
    """
    file_path = ContentUploadManager._upload_file_path(upload_id)

    # Make sure the upload was initialized first and hasn't been deleted
    if not os.path.exists(file_path):
        raise MissingResource(upload_request=upload_id)

    # Use a context manager so the handle is closed even if seek/write
    # raises (the original leaked the handle on error).
    with open(file_path, 'r+') as f:
        f.seek(offset)
        f.write(data)
def last_publish(self, repo_id, distributor_id):
    """
    Look up the timestamp of the most recent publish call, regardless of its
    success or failure. None is returned for a repo that has never published.

    @param repo_id: identifies the repo
    @type repo_id: str

    @param distributor_id: identifies the repo's distributor
    @type distributor_id: str

    @return: timestamp of the last publish
    @rtype: datetime or None

    @raise MissingResource: if there is no distributor identified by the
           given repo ID and distributor ID
    """
    query = {'repo_id': repo_id, 'id': distributor_id}
    found = RepoDistributor.get_collection().find_one(query)

    if found is None:
        # NOTE(review): the error carries only the repo ID, matching the
        # original contract even though the distributor is what's missing.
        raise MissingResource(repo_id)

    return found['last_publish']
def record_event(self, consumer_id, event_type, event_details=None): """ @ivar consumer_id: identifies the consumer @type id: str @param type: event type @type type: str @param details: event details @type details: dict @raises MissingResource: if the given consumer does not exist @raises InvalidValue: if any of the fields is unacceptable """ # Check that consumer exists for all except registration event existing_consumer = Consumer.get_collection().find_one( {'id': consumer_id}) if not existing_consumer and event_type != TYPE_CONSUMER_UNREGISTERED: raise MissingResource(consumer=consumer_id) invalid_values = [] if event_type not in TYPES: invalid_values.append('event_type') if event_details is not None and not isinstance(event_details, dict): invalid_values.append('event_details') if invalid_values: raise InvalidValue(invalid_values) event = ConsumerHistoryEvent(consumer_id, self._originator(), event_type, event_details) ConsumerHistoryEvent.get_collection().save(event)
def GET(self, consumer_id, repo_id=None):
    """
    Fetch all bind objects referencing the specified consumer_id. Optionally,
    specify a repo_id to fetch all bind objects for the consumer_id to the
    repo_id.

    :param consumer_id: The specified consumer.
    :type consumer_id: str
    :param repo_id: The repository to retrieve bindings for (optional)
    :type repo_id: str

    :return: A list of dictionaries that represent
             pulp.server.db.model.consumer.Bind objects
    :rtype: list
    """
    # Collect every missing resource before raising so the caller learns
    # about a bad repo_id and a bad consumer_id in a single response.
    missing = {}

    if repo_id is not None:
        if managers.repo_query_manager().find_by_id(repo_id) is None:
            missing['repo_id'] = repo_id

    try:
        managers.consumer_manager().get_consumer(consumer_id)
    except MissingResource:
        missing['consumer_id'] = consumer_id

    if missing:
        raise MissingResource(**missing)

    bind_manager = managers.consumer_bind_manager()
    serialized = [serialization.binding.serialize(b)
                  for b in bind_manager.find_by_consumer(consumer_id, repo_id)]
    return self.ok(serialized)
def get(self, request, source_id):
    """
    Get a content source by ID.

    :param request: WSGI request object, body contains bits to upload
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param source_id: A content source ID.
    :type source_id: str
    :raises: MissingResource if source id does not exist
    :return: requested content source object.
    :rtype: django.http.HttpResponse
    """
    source = ContentContainer().sources.get(source_id)

    # Guard clause: unknown source IDs are reported as missing resources.
    if not source:
        raise MissingResource(source_id=source_id)

    payload = source.dict()
    payload['_href'] = reverse('content_sources_resource',
                               kwargs={'source_id': source.id})
    return generate_json_response_with_pulp_encoder(payload)
def add_user_to_role(role_id, login):
    """
    Add a user to a role. This has the side-effect of granting all the
    permissions granted to the role to the user.

    :param role_id: role identifier
    :type role_id: str
    :param login: login of user
    :type login: str
    :raise MissingResource: if the given role does not exist
    :raise InvalidValue: if some params are invalid
    """
    role = Role.get_collection().find_one({'id': role_id})
    if role is None:
        raise MissingResource(role_id)

    user = model.User.objects(login=login).first()
    if user is None:
        raise InvalidValue(['login'])

    if role_id in user.roles:
        # Already a member of the role; nothing to grant.
        return

    user.roles.append(role_id)
    user.save()

    # Grant each of the role's stored permissions to the new member.
    for entry in role['permissions']:
        factory.permission_manager().grant(entry['resource'], login,
                                           entry.get('permission', []))
def post(self, request, consumer_id):
    """
    Creates an async task to regenerate content applicability data for given
    consumer.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param consumer_id: The consumer ID.
    :type consumer_id: str

    :raises MissingResource: if some parameters are missing
    :raises OperationPostponed: when an async operation is performed.
    """
    # The consumer must exist before a regeneration task is dispatched.
    if factory.consumer_query_manager().find_by_id(consumer_id) is None:
        raise MissingResource(consumer_id=consumer_id)

    criteria = Criteria(filters={'consumer_id': consumer_id})
    task_tags = [
        tags.action_tag('consumer_content_applicability_regeneration')
    ]
    async_result = regenerate_applicability_for_consumers.apply_async_with_reservation(
        tags.RESOURCE_CONSUMER_TYPE, consumer_id,
        (criteria.as_dict(),), tags=task_tags)

    # The work happens asynchronously; signal the caller with the task.
    raise OperationPostponed(async_result)
def sync_history(self, repo_id, limit=None):
    """
    Return sync history entries for the given repo, sorted from most recent
    to oldest. An empty list is returned when there are no entries.

    @param repo_id: identifies the repo
    @type repo_id: str

    @param limit: maximum number of results to return
    @type limit: int

    @return: list of sync history result instances
    @rtype: list of L{pulp.server.db.model.repository.RepoSyncResult}

    @raise MissingResource: if repo_id does not reference a valid repo
    """
    # Validation
    if Repo.get_collection().find_one({'id': repo_id}) is None:
        raise MissingResource(repo_id)

    # REST API callers that omit a limit get a default page size.
    effective_limit = 10 if limit is None else limit

    # Retrieve the entries, newest first.
    results = RepoSyncResult.get_collection().find({'repo_id': repo_id})
    results.limit(effective_limit)
    results.sort('completed', pymongo.DESCENDING)

    return list(results)
def add_user_to_role(role_id, login):
    """
    Add a user to a role. This has the side-effect of granting all the
    permissions granted to the role to the user.

    :param role_id: role identifier
    :type role_id: str
    :param login: login of user
    :type login: str
    :raise MissingResource: if the given role does not exist
    :raise InvalidValue: if no user exists with the given login
    """
    role = Role.get_collection().find_one({'id': role_id})
    if role is None:
        raise MissingResource(role_id)

    user = User.get_collection().find_one({'login': login})
    if user is None:
        raise InvalidValue(['login'])

    if role_id in user['roles']:
        # Nothing to do; the user already holds this role.
        return

    user['roles'].append(role_id)
    User.get_collection().save(user, safe=True)

    # Grant each permission recorded on the role to the new member.
    for entry in role['permissions']:
        factory.permission_manager().grant(entry['resource'], login,
                                           entry.get('permission', []))
def get_content_unit_by_keys_dict(self, content_type, unit_keys_dict,
                                  model_fields=None):
    """
    Look up an individual content unit in the corresponding content type
    collection using the given keys dictionary.

    @param content_type: unique id of content collection
    @type content_type: str
    @param unit_keys_dict: dictionary of key, value pairs that can uniquely
                           identify a content unit
    @type unit_keys_dict: dict
    @param model_fields: fields of each content unit to report,
                         None means all fields
    @type model_fields: None or list of str's
    @return: content unit from the content type collection that matches the
             keys dict
    @rtype: dict
    @raise: ValueError if the unit_keys_dict is invalid
    @raise: L{MissingResource} if no content unit in the content type
            collection matches the keys dict
    """
    matches = self.get_multiple_units_by_keys_dicts(content_type,
                                                    (unit_keys_dict, ),
                                                    model_fields)
    if not matches:
        raise MissingResource(
            _('No content unit for keys: %(k)s') %
            {'k': pformat(unit_keys_dict)})
    return matches[0]
def update_permission(self, resource_uri, delta): """ Updates a permission object. @param resource_uri: identifies the resource URI of the permission being deleted @type resource_uri: str @param delta: A dict containing update keywords. @type delta: dict @return: The updated object @rtype: dict """ # Check whether the permission exists found = Permission.get_collection().find_one( {'resource': resource_uri}) if found is None: raise MissingResource(resource_uri) for key, value in delta.items(): # simple changes if key in ('users', ): found[key] = value continue # unsupported raise PulpDataException( _("Update Keyword [%s] is not supported" % key)) Permission.get_collection().save(found, safe=True)
def create_schedule(cls, action, consumer_id, units, options, schedule,
                    failure_threshold=None, enabled=True):
    """
    Creates a new schedule for a consumer action.

    :param action: a unique identifier for an action, one of
                   UNIT_INSTALL_ACTION, UNIT_UPDATE_ACTION,
                   UNIT_UNINSTALL_ACTION
    :type action: basestring
    :param consumer_id: a unique ID for a consumer
    :type consumer_id: basestring
    :param units: A list of content units to be installed, each as a dict in
                  the form: { type_id:<str>, unit_key:<dict> }
    :type units: list
    :param options: a dictionary that will be passed to the
                    action-appropriate task as the "options" argument
    :type options: dict
    :param schedule: ISO8601 string representation of the schedule
    :type schedule: basestring
    :param failure_threshold: optional positive integer indicating how many
                              times this schedule's execution can fail before
                              being automatically disabled.
    :type failure_threshold: int or NoneType
    :param enabled: boolean indicating if this schedule should be actively
                    loaded and executed by the scheduler. Defaults to True.
    :type enabled: bool
    :return: instance of the new ScheduledCall
    :rtype: pulp.server.db.models.dispatch.ScheduledCall

    :raise: pulp.server.exceptions.MissingResource
    """
    # Validate the consumer and schedule options before anything is saved.
    cls._validate_consumer(consumer_id)
    utils.validate_initial_schedule_options(schedule, failure_threshold,
                                            enabled)
    if not units:
        # An empty unit list is reported as a missing resource.
        raise MissingResource(['units'])

    # Map the action to its task and build the task invocation.
    task = ACTIONS_TO_TASKS[action]
    args = [consumer_id]
    kwargs = {'units': units, 'options': options}
    resource = Consumer.build_resource_tag(consumer_id)

    schedule = ScheduledCall(schedule, task, args=args, kwargs=kwargs,
                             resource=resource,
                             failure_threshold=failure_threshold,
                             enabled=enabled)
    schedule.save()
    return schedule
def get(self, request, consumer_id):
    """
    List schedules <action>.

    :param request: WSGI request object
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param consumer_id: The consumer ID.
    :type consumer_id: str

    :raises MissingResource: if consumer does not exist
    :return: Response containing consumer's schedules <action>
    :rtype: django.http.HttpResponse
    """
    try:
        factory.consumer_manager().get_consumer(consumer_id)
    except MissingResource:
        # Re-raise with the keyword form so the response names the consumer.
        raise MissingResource(consumer_id=consumer_id)

    serialized = []
    for entry in self.manager.get(consumer_id, self.ACTION):
        obj = scheduled_unit_management_obj(entry.for_display())
        add_link_schedule(obj, self.ACTION, consumer_id)
        serialized.append(obj)

    return generate_json_response_with_pulp_encoder(serialized)
def add_permissions_to_role(self, role_id, resource, operations):
    """
    Add permissions to a role.

    @type role_id: str
    @param role_id: role identifier

    @type resource: str
    @param resource: resource path to grant permissions to

    @type operations: list or tuple
    @param operations: list of allowed operations being granted

    @raise MissingResource: if the given role does not exist
    @raise PulpDataException: if role_id is the protected super-users role
    """
    # The built-in super-users role may never be modified.
    if role_id == self.super_user_role:
        raise PulpDataException(_('super-users role cannot be changed'))
    role = Role.get_collection().find_one({'id': role_id})
    if role is None:
        raise MissingResource(role_id)
    # Merge the new operations into the role, skipping duplicates.
    current_ops = role['permissions'].setdefault(resource, [])
    for o in operations:
        if o in current_ops:
            continue
        current_ops.append(o)
    # Propagate the grant to every user that already holds the role.
    users = factory.user_query_manager().find_users_belonging_to_role(
        role_id)
    for user in users:
        factory.permission_manager().grant(resource, user['login'],
                                           operations)
    Role.get_collection().save(role, safe=True)
def update_permission(resource_uri, delta): """ Updates a permission object. :param resource_uri: identifies the resource URI of the permission being deleted :type resource_uri: str :param delta: A dict containing update keywords. :type delta: dict :raises MissingResource: if the permission does not exist :raises PulpDataException: if some usupported keys were specified """ # Check whether the permission exists found = Permission.get_collection().find_one({'resource': resource_uri}) if found is None: raise MissingResource(resource_uri) for key, value in delta.items(): # simple changes if key in ('users',): found[key] = value continue # unsupported raise PulpDataException(_("Update Keyword [%s] is not supported" % key)) Permission.get_collection().save(found, safe=True)
def update_repo_scratchpad(self, repo_id, scratchpad):
    """
    Update the repository scratchpad with the specified key-value pairs.
    New keys are added, existing keys are overwritten. If the scratchpad
    dictionary is empty then this is a no-op.

    :param repo_id: A repository ID
    :type repo_id: str

    :param scratchpad: a dict used to update the scratchpad.
    :type scratchpad: dict

    :raise MissingResource: if there is no repo with repo_id
    """
    # Newer versions of mongo reject an empty ``$set`` document, so an
    # empty scratchpad update must be treated as a no-op.
    if not scratchpad:
        return

    # Namespace every key under 'scratchpad.' so only those fields change.
    namespaced = dict(('scratchpad.%s' % key, value)
                      for key, value in scratchpad.items())

    outcome = Repo.get_collection().update(
        {'id': repo_id}, {'$set': namespaced}, safe=True)
    if outcome['n'] == 0:
        raise MissingResource(repo_id=repo_id)
def cancel(task_id):
    """
    Cancel the task that is represented by the given task_id. This method
    cancels only the task with given task_id, not the spawned tasks. This
    also updates task's state to 'canceled'.

    :param task_id: The ID of the task you wish to cancel
    :type task_id: basestring

    :raises MissingResource: if a task with given task_id does not exist
    :raises PulpCodedException: if given task is already in a complete state
    """
    try:
        task_status = TaskStatus.objects.get(task_id=task_id)
    except DoesNotExist:
        raise MissingResource(task_id)

    if task_status['state'] in constants.CALL_COMPLETE_STATES:
        # Already finished; log the fact and stop without revoking.
        msg = _('Task [%(task_id)s] already in a completed state: %(state)s')
        _logger.info(msg % {'task_id': task_id,
                            'state': task_status['state']})
        return

    controller.revoke(task_id, terminate=True)

    # Only flip the state if the task has not completed in the meantime.
    qs = TaskStatus.objects(task_id=task_id,
                            state__nin=constants.CALL_COMPLETE_STATES)
    qs.update_one(set__state=constants.CALL_CANCELED_STATE)

    _logger.info(_('Task canceled: %(task_id)s.') % {'task_id': task_id})
def get_distributor(self, repo_id, distributor_id):
    """
    Returns an individual distributor on the given repo.

    @param repo_id: identifies the repo
    @type repo_id: str

    @param distributor_id: identifies the distributor
    @type distributor_id: str

    @return: key-value pairs describing the distributor
    @rtype: dict

    @raise MissingResource: if no distributor with the given ID exists on
           the repo. NOTE(review): the repo's own existence is never checked
           here, so a nonexistent repo also surfaces as a missing
           distributor.
    """
    distributor = RepoDistributor.get_collection().find_one({
        'repo_id': repo_id,
        'id': distributor_id
    })
    if distributor is None:
        raise MissingResource(distributor=distributor_id)
    return distributor
def POST(self, group_id):
    """
    Create a bind association between the consumers belonging to the given
    consumer group by id included in the URL path and a repo-distributor
    specified in the POST body: {repo_id:<str>, distributor_id:<str>}.

    Designed to be idempotent so only MissingResource is expected to be
    raised by manager.

    :param group_id: The consumer group to bind.
    :type group_id: str
    :return: list of call requests
    :rtype: list
    """
    body = self.params()
    repo_id = body.get('repo_id')
    distributor_id = body.get('distributor_id')
    binding_config = body.get('binding_config', None)
    options = body.get('options', {})
    notify_agent = body.get('notify_agent', True)

    missing = verify_group_resources(group_id, repo_id, distributor_id)
    if missing:
        # An unknown group is a missing resource; any other bad value in
        # the body is treated as invalid input instead.
        if 'group_id' in missing:
            raise MissingResource(**missing)
        raise InvalidValue(list(missing))

    task_args = (group_id, repo_id, distributor_id, notify_agent,
                 binding_config, options)
    async_task = bind.apply_async(task_args)
    raise pulp_exceptions.OperationPostponed(async_task)
def cancel(task_id):
    """
    Cancel the task that is represented by the given task_id. This method
    cancels only the task with given task_id, not the spawned tasks. This
    also updates task's state to 'canceled'.

    :param task_id: The ID of the task you wish to cancel
    :type task_id: basestring

    :raises MissingResource: if a task with given task_id does not exist
    :raises PulpCodedException: if given task is already in a complete state
    """
    task_status = TaskStatusManager.find_by_task_id(task_id)
    if task_status is None:
        raise MissingResource(task_id)

    if task_status['state'] in constants.CALL_COMPLETE_STATES:
        # Already finished; log the fact and stop without revoking.
        msg = _('Task [%(task_id)s] already in a completed state: %(state)s')
        logger.info(msg % {'task_id': task_id,
                           'state': task_status['state']})
        return

    controller.revoke(task_id, terminate=True)

    # Atomically flip the state only if the task has not completed since
    # the check above.
    query = {'task_id': task_id,
             'state': {'$nin': constants.CALL_COMPLETE_STATES}}
    update = {'$set': {'state': constants.CALL_CANCELED_STATE}}
    TaskStatus.get_collection().find_and_modify(query, update)

    logger.info(_('Task canceled: %(task_id)s.') % {'task_id': task_id})
def delete_user(login):
    """
    Deletes the given user. Deletion of last superuser is not permitted.

    @param login: identifies the user being deleted
    @type login: str

    @raise MissingResource: if the given user does not exist
    @raise InvalidValue: if login value is invalid
    """
    # Reject obviously bad input before touching the database.
    if login is None or invalid_type(login, basestring):
        raise InvalidValue(['login'])

    user = User.get_collection().find_one({'login': login})
    if user is None:
        raise MissingResource(login)

    # Deleting the final superuser would lock everyone out.
    if factory.user_query_manager().is_last_super_user(login):
        raise PulpDataException(
            _("The last superuser [%s] cannot be deleted" % login))

    # Revoke all permissions from the user
    factory.permission_manager().revoke_all_permissions_from_user(login)

    User.get_collection().remove({'login': login})
def get_content_unit_by_keys_dict(self, content_type, unit_keys_dict,
                                  model_fields=None):
    """
    Look up an individual content unit in the corresponding content type
    collection using the given keys dictionary.

    :param content_type: unique id of content collection
    :type content_type: str
    :param unit_keys_dict: dictionary of key, value pairs that can uniquely
                           identify a content unit
    :type unit_keys_dict: dict
    :param model_fields: fields of each content unit to report,
                         None means all fields
    :type model_fields: None or list of str's
    :return: content unit from the content type collection that matches the
             keys dict
    :rtype: dict
    :raises ValueError: if the unit_keys_dict is invalid
    :raises L{MissingResource}: if no content unit in the content type
            collection matches the keys dict
    """
    generator = self.get_multiple_units_by_keys_dicts(
        content_type, (unit_keys_dict, ), model_fields)
    try:
        # An exhausted generator means nothing matched the keys dict.
        return generator.next()
    except StopIteration:
        raise MissingResource(
            _('No content unit for keys: %(k)s') %
            {'k': pformat(unit_keys_dict)})
def get_distributor(repo_group_id, distributor_id):
    """
    Returns an individual distributor on the given repo group, raising an
    exception if one does not exist at the given ID.

    @param repo_group_id: identifies the repo group
    @type repo_group_id: str

    @param distributor_id: identifies the distributor
    @type distributor_id: str

    @return: SON representation of the distributor
    @rtype: dict

    @raise MissingResource: if either there is no distributor for the given
           group ID/distributor ID pair or the group itself does not exist
    """
    # The group lookup raises first if the group itself is missing, which
    # preserves the documented exception ordering.
    manager_factory.repo_group_query_manager().get_group(repo_group_id)

    # The group exists; now look for the distributor on it.
    query = {
        'repo_group_id': repo_group_id,
        'id': distributor_id,
    }
    found = RepoGroupDistributor.get_collection().find_one(query)
    if found is None:
        raise MissingResource(repo_group=repo_group_id,
                              distributor=distributor_id)

    return found
def remove_importer(repo_id):
    """
    Removes an importer from a repository.

    :param repo_id: identifies the repo
    :type repo_id: str

    :raise MissingResource: if the given repo does not exist
    :raise MissingResource: if the given repo does not have an importer
    """
    importer_coll = RepoImporter.get_collection()

    # Validation: raises MissingResource if the repo itself is absent.
    repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
    repo_importer = importer_coll.find_one({'repo_id': repo_id})
    if repo_importer is None:
        raise MissingResource(repo_id)

    # Remove any sync schedules tied to this importer before it is deleted.
    RepoSyncScheduleManager().delete_by_importer_id(repo_id, repo_importer['id'])

    # Call the importer's cleanup method so the plugin can release any
    # resources it holds for this repo before the record is removed.
    importer_type_id = repo_importer['importer_type_id']
    importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])
    transfer_repo = repo_obj.to_transfer_repo()
    importer_instance.importer_removed(transfer_repo, call_config)

    # Update the database to reflect the removal
    importer_coll.remove({'repo_id': repo_id})