def POST(self, repo_group_id):
    """
    Add a distributor to a repository group.

    Detailed validation of the distributor type and config is deferred to
    the manager layer.
    """
    body = self.params()
    type_id = body.get('distributor_type_id', None)
    config = body.get('distributor_config', None)
    distributor_id = body.get('distributor_id', None)

    manager = managers_factory.repo_group_distributor_manager()
    task_weight = pulp_config.config.getint('tasks', 'create_weight')

    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                 action_tag('add_distributor')]
    if distributor_id is not None:
        # only tag the distributor resource when the caller named one explicitly
        task_tags.append(resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE,
                                      distributor_id))

    request = CallRequest(manager.add_distributor,
                          [repo_group_id, type_id, config, distributor_id],
                          weight=task_weight,
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)

    distributor = execution.execute(request)
    link = serialization.link.child_link_obj(distributor['id'])
    distributor.update(link)
    return self.created(link['_href'], distributor)
def POST(self):
    """
    Remove a set of permissions on a resource from a role.

    Expects 'role_id', 'resource', and 'operations' in the request body;
    all three are required (enforced by _check_invalid_params).
    """
    # Params
    params = self.params()
    role_id = params.get('role_id', None)
    resource = params.get('resource', None)
    operation_names = params.get('operations', None)
    _check_invalid_params({'role_id':role_id, 'resource':resource, 'operation_names':operation_names})
    # translate operation names into operation values
    operations = _get_operations(operation_names)
    # Remove the permissions from the role synchronously
    role_manager = managers.role_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
            action_tag('remove_permission_from_role')]
    call_request = CallRequest(role_manager.remove_permissions_from_role,
                               [role_id, resource, operations],
                               tags=tags)
    call_request.updates_resource(dispatch_constants.RESOURCE_ROLE_TYPE, role_id)
    return self.ok(execution.execute_sync(call_request))
def POST(self, repo_id):
    """
    Set the importer on a repository, replacing any existing importer.

    Validation of the importer type and configuration happens in the
    manager layer.
    """
    body = self.params()
    type_id = body.get('importer_type_id', None)
    config = body.get('importer_config', None)

    # The type id is the only piece we insist on up front.
    if type_id is None:
        _LOG.error('Missing importer type adding importer to repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_type'])

    # Note: If an importer exists, it's removed, so no need to handle 409s.
    # Note: If the plugin raises an exception during initialization, let it
    # bubble up and be handled like any other 500.

    manager = manager_factory.repo_importer_manager()
    task_weight = pulp_config.config.getint('tasks', 'create_weight')
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('add_importer')]

    request = CallRequest(manager.set_importer,
                          [repo_id, type_id],
                          {'repo_plugin_config': config},
                          weight=task_weight,
                          tags=task_tags,
                          kwarg_blacklist=['repo_plugin_config'])
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return execution.execute_sync_created(self, request, 'importer')
def publish_itinerary(repo_id, distributor_id, overrides=None):
    """
    Create an itinerary for a repository publish.

    :param repo_id: id of the repo to publish
    :type repo_id: str
    :param distributor_id: id of the distributor to use for the publish
    :type distributor_id: str
    :param overrides: options to pass through to the publish manager
    :type overrides: dict or None
    :return: list of call requests
    :rtype: list
    """
    manager = manager_factory.repo_publish_manager()
    task_weight = pulp_config.config.getint('tasks', 'publish_weight')
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('publish')]
    request = CallRequest(manager.publish,
                          [repo_id, distributor_id],
                          {'publish_config_override': overrides},
                          weight=task_weight,
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return [request]
def PUT(self, consumer_id, schedule_id):
    """
    Update a scheduled unit uninstall for a consumer.

    'units' and 'options' are split out of the request body; the remaining
    fields are treated as schedule data.
    """
    # 404 if the consumer doesn't exist
    managers.consumer_manager().get_consumer(consumer_id)

    schedule_data = self.params()
    units = schedule_data.pop('units', None)
    uninstall_options = None
    if 'options' in schedule_data:
        uninstall_options = {'options': schedule_data.pop('options')}

    schedule_manager = managers.schedule_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                 resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
                 action_tag('update_unit_uninstall_schedule')]
    request = CallRequest(schedule_manager.update_unit_uninstall_schedule,
                          [consumer_id, schedule_id, units, uninstall_options, schedule_data],
                          tags=task_tags,
                          archive=True)
    request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(request)

    # fetch and serialize the updated schedule for the response
    updated_call = dispatch_factory.scheduler().get(schedule_id)
    response_doc = serialization.dispatch.scheduled_unit_management_obj(updated_call)
    response_doc.update(serialization.link.current_link_obj())
    return self.ok(response_doc)
def POST(self, repo_id):
    """
    Import an uploaded unit into the given repository.

    :param repo_id: id of the repository the upload is imported into
    :type repo_id: basestring
    :return: json-serialized dict with 'success_flag' (bool, whether the
             import succeeded) and 'summary' (the Importer's summary)
    :rtype: basestring
    """
    # Collect user input
    body = self.params()
    upload_id = body['upload_id']
    type_id = body['unit_type_id']
    unit_key = body['unit_key']
    metadata = body.pop('unit_metadata', None)

    # Coordinator configuration
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('import_upload')]
    upload_manager = manager_factory.content_upload_manager()
    request = CallRequest(upload_manager.import_uploaded_unit,
                          [repo_id, type_id, unit_key, metadata, upload_id],
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)

    report = execution.execute(request)
    return self.ok(report)
def create_publish_schedule(self, repo_id, distributor_id, publish_options, schedule_data): """ Create a new scheduled publish for the given repository and distributor. @param repo_id: @param distributor_id: @param publish_options: @param schedule_data: @return: """ # validate the input self._validate_distributor(repo_id, distributor_id) self._validate_keys(publish_options, _PUBLISH_OPTION_KEYS) if 'schedule' not in schedule_data: raise pulp_exceptions.MissingValue(['schedule']) # build the publish call publish_manager = managers_factory.repo_publish_manager() args = [repo_id, distributor_id] kwargs = {'publish_config_override': publish_options['override_config']} weight = pulp_config.config.getint('tasks', 'publish_weight') tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)] call_request = CallRequest(publish_manager.publish, args, kwargs, weight=weight, tags=tags, archive=True) call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id) call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, publish_manager.prep_publish) # schedule the publish scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) distributor_manager = managers_factory.repo_distributor_manager() distributor_manager.add_publish_schedule(repo_id, distributor_id, schedule_id) return schedule_id
def create_sync_schedule(self, repo_id, importer_id, sync_options, schedule_data): """ Create a new sync schedule for a given repository using the given importer. @param repo_id: @param importer_id: @param sync_options: @param schedule_data: @return: """ # validate the input self._validate_importer(repo_id, importer_id) self._validate_keys(sync_options, _SYNC_OPTION_KEYS) if 'schedule' not in schedule_data: raise pulp_exceptions.MissingValue(['schedule']) # build the sync call request sync_manager = managers_factory.repo_sync_manager() args = [repo_id] kwargs = {'sync_config_override': sync_options['override_config']} weight = pulp_config.config.getint('tasks', 'sync_weight') tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)] call_request = CallRequest(sync_manager.sync, args, kwargs, weight=weight, tags=tags, archive=True) call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id) call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id) call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, sync_manager.prep_sync) # schedule the sync scheduler = dispatch_factory.scheduler() schedule_id = scheduler.add(call_request, **schedule_data) importer_manager = managers_factory.repo_importer_manager() importer_manager.add_sync_schedule(repo_id, schedule_id) return schedule_id
def POST(self):
    """
    Grant a set of operations on a resource to a user.

    Expects 'login', 'resource', and 'operations' in the request body;
    all three are required.
    """
    body = self.params()
    login = body.get('login', None)
    resource = body.get('resource', None)
    operation_names = body.get('operations', None)
    _check_invalid_params({'login': login,
                           'resource': resource,
                           'operation_names': operation_names})
    operations = _get_operations(operation_names)

    # grant the permission synchronously
    permission_manager = managers.permission_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_PERMISSION_TYPE, resource),
                 resource_tag(dispatch_constants.RESOURCE_USER_TYPE, login),
                 action_tag('grant_permission_to_user')]
    request = CallRequest(permission_manager.grant,
                          [resource, login, operations],
                          tags=task_tags)
    request.reads_resource(dispatch_constants.RESOURCE_USER_TYPE, login)
    request.updates_resource(dispatch_constants.RESOURCE_PERMISSION_TYPE, resource)
    return self.ok(execution.execute_sync(request))
def publish_itinerary(repo_id, distributor_id, overrides=None):
    """
    Build the itinerary (a one-element list of call requests) for publishing
    a repository through the given distributor.

    :param repo_id: id of the repo to publish
    :type repo_id: str
    :param distributor_id: id of the distributor to use for the publish
    :type distributor_id: str
    :param overrides: options passed through to the publish manager
    :type overrides: dict or None
    :return: list of call requests
    :rtype: list
    """
    publish_manager = manager_factory.repo_publish_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('publish')]
    request = CallRequest(publish_manager.publish,
                          [repo_id, distributor_id],
                          {'publish_config_override': overrides},
                          weight=pulp_config.config.getint('tasks', 'publish_weight'),
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return [request]
def PUT(self, repo_group_id, distributor_id):
    """
    Update the configuration of a repository group distributor.

    'distributor_config' is required in the request body.
    """
    body = self.params()
    distributor_config = body.get('distributor_config', None)
    if distributor_config is None:
        raise pulp_exceptions.MissingValue(['distributor_config'])

    manager = managers_factory.repo_group_distributor_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                 resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id),
                 action_tag('update_distributor')]
    request = CallRequest(manager.update_distributor_config,
                          args=[repo_group_id, distributor_id, distributor_config],
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id)

    updated = execution.execute(request)
    updated.update(serialization.link.current_link_obj())
    return self.ok(updated)
def POST(self, repo_id):
    """
    Import an uploaded unit into the given repository.

    The import report is discarded; the response body is always null.
    """
    # Collect user input
    body = self.params()
    upload_id = body['upload_id']
    type_id = body['unit_type_id']
    unit_key = body['unit_key']
    metadata = body.pop('unit_metadata', None)

    # Coordinator configuration
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('import_upload')]
    upload_manager = manager_factory.content_upload_manager()
    request = CallRequest(upload_manager.import_uploaded_unit,
                          [repo_id, type_id, unit_key, metadata, upload_id],
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)

    execution.execute(request)
    return self.ok(None)
def PUT(self, id):
    """
    Update a repository's metadata ('delta') and, optionally, its importer
    and distributor configurations in a single call.
    """
    parameters = self.params()
    delta = parameters.get('delta', None)
    importer_config = parameters.get('importer_config', None)
    distributor_configs = parameters.get('distributor_configs', None)
    repo_manager = manager_factory.repo_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, id),
            action_tag('update')]
    # NOTE(review): kwarg_blacklist presumably keeps the raw plugin configs
    # out of the archived call request — confirm against CallRequest docs.
    call_request = CallRequest(repo_manager.update_repo_and_plugins,
                               [id, delta],
                               {'importer_config': importer_config,
                                'distributor_configs': distributor_configs},
                               tags=tags,
                               archive=True,
                               kwarg_blacklist=['importer_config', 'distributor_configs'])
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, id)
    repo = execution.execute(call_request)
    # attach the canonical link before returning the repository document
    repo.update(serialization.link.current_link_obj())
    return self.ok(repo)
def POST(self, repo_id):
    """
    Asynchronously unassociate content units matching the given criteria
    from the repository. The units are recorded as user-owned, attributed
    to the currently authenticated principal.
    """
    params = self.params()
    criteria = params.get('criteria', None)
    # criteria is optional; when absent the manager decides what to remove
    if criteria is not None:
        try:
            criteria = UnitAssociationCriteria.from_client_input(criteria)
        except:
            _LOG.error('Error parsing unassociation criteria [%s]' % criteria)
            # Python 2 three-expression raise: re-raise as PulpDataException
            # while preserving the original traceback.
            raise exceptions.PulpDataException(), None, sys.exc_info()[2]
    association_manager = manager_factory.repo_unit_association_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('unassociate')]
    call_request = CallRequest(association_manager.unassociate_by_criteria,
                               [repo_id,
                                criteria,
                                RepoContentUnit.OWNER_TYPE_USER,
                                manager_factory.principal_manager().get_principal()['login']],
                               tags=tags,
                               archive=True)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return execution.execute_async(self, call_request)
def POST(self, repo_id, distributor_id):
    """
    Create a scheduled publish for the repository via the given distributor
    and return the serialized schedule with a 201.
    """
    # raises MissingResource (404) if the distributor doesn't exist
    distributor_manager = manager_factory.repo_distributor_manager()
    distributor_manager.get_distributor(repo_id, distributor_id)
    schedule_options = self.params()
    # 'override_config' is split off for the publish itself; everything
    # remaining in the body is treated as schedule configuration
    publish_options = {'override_config': schedule_options.pop('override_config', {})}
    schedule_manager = manager_factory.schedule_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            action_tag('create_publish_schedule')]
    call_request = CallRequest(schedule_manager.create_publish_schedule,
                               [repo_id, distributor_id, publish_options, schedule_options],
                               weight=weight,
                               tags=tags,
                               archive=True)
    # the repo is only read; the distributor is what gains the schedule
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    schedule_id = execution.execute_sync(call_request)
    # look the schedule back up so the serialized form can be returned
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def POST(self, repo_group_id):
    """
    Publish the repository group through the group distributor named by
    'id' in the request body.
    """
    body = self.params()
    distributor_id = body.get('id', None)
    overrides = body.get('override_config', None)
    if distributor_id is None:
        raise MissingValue(['id'])

    manager = managers_factory.repo_group_publish_manager()
    task_weight = pulp_config.config.getint('tasks', 'publish_weight')
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                 resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id),
                 action_tag('publish')]
    request = CallRequest(manager.publish,
                          args=[repo_group_id, distributor_id],
                          kwargs={'publish_config_override': overrides},
                          tags=task_tags,
                          weight=task_weight,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id)
    request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                    manager.prep_publish)
    return execution.execute_async(self, request)
def POST(self):
    """
    Creates an async task to regenerate content applicability data for
    given updated repositories.

    body {repo_criteria:<dict>}
    """
    body = self.params()
    repo_criteria = body.get('repo_criteria', None)
    if repo_criteria is None:
        # property names are passed as a list, consistent with the other
        # MissingValue call sites in this module
        raise exceptions.MissingValue(['repo_criteria'])
    try:
        repo_criteria = Criteria.from_client_input(repo_criteria)
    except:
        # any parse failure is reported as bad client input
        raise exceptions.InvalidValue(['repo_criteria'])
    manager = manager_factory.applicability_regeneration_manager()
    regeneration_tag = action_tag('applicability_regeneration')
    call_request = CallRequest(manager.regenerate_applicability_for_repos,
                               [repo_criteria],
                               tags=[regeneration_tag])
    # allow only one applicability regeneration task at a time
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_PROFILE_APPLICABILITY_TYPE,
                                  dispatch_constants.RESOURCE_ANY_ID)
    return execution.execute_async(self, call_request)
def POST(self, repo_id):
    """
    Add a distributor to a repository. A 201 response carries the new
    distributor; validation of type/config happens in the manager.
    """
    # Params (validation will occur in the manager)
    params = self.params()
    distributor_type = params.get('distributor_type_id', None)
    distributor_config = params.get('distributor_config', None)
    distributor_id = params.get('distributor_id', None)
    auto_publish = params.get('auto_publish', False)
    # Update the repo
    distributor_manager = manager_factory.repo_distributor_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag('add_distributor')]
    # only tag/declare the distributor resource if the caller named one
    if distributor_id is not None:
        tags.append(resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id))
    # NOTE(review): kwarg_blacklist presumably keeps the raw plugin config
    # out of the archived call request — confirm against CallRequest docs.
    call_request = CallRequest(distributor_manager.add_distributor,
                               [repo_id, distributor_type],
                               {'repo_plugin_config': distributor_config,
                                'auto_publish': auto_publish,
                                'distributor_id': distributor_id},
                               weight=weight,
                               tags=tags,
                               kwarg_blacklist=['repo_plugin_config'])
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    if distributor_id is not None:
        call_request.creates_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    return execution.execute_created(self, call_request, distributor_id)
def POST(self, repo_group_id):
    """
    Kick off an asynchronous publish of the repository group using the
    group distributor identified by 'id' in the request body.
    """
    body = self.params()
    group_distributor_id = body.get('id', None)
    config_override = body.get('override_config', None)
    if group_distributor_id is None:
        raise MissingValue(['id'])

    publish_manager = managers_factory.repo_group_publish_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                 resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE,
                              group_distributor_id),
                 action_tag('publish')]
    request = CallRequest(publish_manager.publish,
                          args=[repo_group_id, group_distributor_id],
                          kwargs={'publish_config_override': config_override},
                          tags=task_tags,
                          weight=pulp_config.config.getint('tasks', 'publish_weight'),
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE,
                             group_distributor_id)
    request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                    publish_manager.prep_publish)
    return execution.execute_async(self, request)
def POST(self):
    """
    Grant a user a set of operations on a resource.

    'login', 'resource', and 'operations' are all required in the body.
    """
    body = self.params()
    login = body.get('login', None)
    resource = body.get('resource', None)
    operation_names = body.get('operations', None)
    _check_invalid_params({'login': login,
                           'resource': resource,
                           'operation_names': operation_names})
    operations = _get_operations(operation_names)

    # the grant runs synchronously
    permission_manager = managers.permission_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_PERMISSION_TYPE, resource),
                 resource_tag(dispatch_constants.RESOURCE_USER_TYPE, login),
                 action_tag('grant_permission_to_user')]
    request = CallRequest(permission_manager.grant,
                          [resource, login, operations],
                          tags=task_tags)
    request.reads_resource(dispatch_constants.RESOURCE_USER_TYPE, login)
    request.updates_resource(dispatch_constants.RESOURCE_PERMISSION_TYPE, resource)
    result = execution.execute_sync(request)
    return self.ok(result)
def POST(self, repo_id, importer_id):
    """
    Create a scheduled sync for the repository via the given importer and
    return the serialized schedule with a 201.
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    # a repo has a single importer; 404 if the URL names a different one
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)
    schedule_options = self.params()
    # 'override_config' applies to the sync itself; the remaining body
    # fields are schedule configuration
    sync_options = {'override_config': schedule_options.pop('override_config', {})}
    schedule_manager = manager_factory.schedule_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
            action_tag('create_sync_schedule')]
    call_request = CallRequest(schedule_manager.create_sync_schedule,
                               [repo_id, importer_id, sync_options, schedule_options],
                               weight=weight,
                               tags=tags,
                               archive=True)
    # the repo is only read; the importer is what gains the schedule
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    schedule_id = execution.execute_sync(call_request)
    # look the schedule back up so the serialized form can be returned
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def POST(self, repo_id):
    """
    Install (or replace) the repository's importer.

    Only the presence of the importer type is checked here; everything
    else is validated by the manager.
    """
    request_body = self.params()
    importer_type_id = request_body.get('importer_type_id', None)
    plugin_config = request_body.get('importer_config', None)

    if importer_type_id is None:
        _LOG.error('Missing importer type adding importer to repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_type'])

    # Note: If an importer exists, it's removed, so no need to handle 409s.
    # Note: If the plugin raises an exception during initialization, let it
    # bubble up and be handled like any other 500.

    importer_manager = manager_factory.repo_importer_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('add_importer')]
    request = CallRequest(importer_manager.set_importer,
                          [repo_id, importer_type_id],
                          {'repo_plugin_config': plugin_config},
                          weight=pulp_config.config.getint('tasks', 'create_weight'),
                          tags=task_tags,
                          kwarg_blacklist=['repo_plugin_config'])
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    return execution.execute_sync_created(self, request, 'importer')
def POST(self):
    """
    Remove a set of permissions on a resource from a role.

    Expects 'role_id', 'resource', and 'operations' in the request body;
    all three are required (enforced by _check_invalid_params).
    """
    # Params
    params = self.params()
    role_id = params.get('role_id', None)
    resource = params.get('resource', None)
    operation_names = params.get('operations', None)
    _check_invalid_params({
        'role_id': role_id,
        'resource': resource,
        'operation_names': operation_names
    })
    # translate operation names into operation values
    operations = _get_operations(operation_names)
    # Remove the permissions from the role synchronously
    role_manager = managers.role_manager()
    tags = [
        resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
        action_tag('remove_permission_from_role')
    ]
    call_request = CallRequest(role_manager.remove_permissions_from_role,
                               [role_id, resource, operations],
                               tags=tags)
    call_request.updates_resource(dispatch_constants.RESOURCE_ROLE_TYPE, role_id)
    return self.ok(execution.execute_sync(call_request))
def DELETE(self, repo_id, distributor_id, schedule_id):
    """
    Delete a scheduled publish from the given repository and distributor.
    """
    # 404 unless the schedule is actually attached to this distributor
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id,
                                         distributor=distributor_id,
                                         publish_schedule=schedule_id)
    schedule_manager = manager_factory.schedule_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('delete_publish_schedule')]
    call_request = CallRequest(schedule_manager.delete_publish_schedule,
                               [repo_id, distributor_id, schedule_id],
                               tags=tags,
                               archive=True)
    # the repo is only read; the distributor loses the schedule reference
    # and the schedule itself is deleted
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    call_request.deletes_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    result = execution.execute(call_request)
    return self.ok(result)
def PUT(self, repo_id, distributor_id, schedule_id):
    """
    Update an existing scheduled publish for the given repository and
    distributor, returning the updated serialized schedule.
    """
    # 404 unless the schedule is actually attached to this distributor
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id, distributor=distributor_id, publish_schedule=schedule_id)
    # 'override_config' updates the publish itself; everything else in the
    # body updates the schedule
    publish_update = {}
    schedule_update = self.params()
    if 'override_config' in schedule_update:
        publish_update['override_config'] = schedule_update.pop('override_config')
    schedule_manager = manager_factory.schedule_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
            action_tag('update_publish_schedule')]
    call_request = CallRequest(schedule_manager.update_publish_schedule,
                               [repo_id, distributor_id, schedule_id, publish_update, schedule_update],
                               tags=tags,
                               archive=True)
    # repo and distributor are only read; the schedule is what changes
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.reads_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    call_request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(call_request)
    # fetch and serialize the updated schedule for the response
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def PUT(self, repo_id, importer_id):
    """
    Update the configuration of the repository's importer.

    Full validation of the new configuration is left to the manager.
    """
    body = self.params()
    importer_config = body.get('importer_config', None)
    if importer_config is None:
        _LOG.error('Missing configuration updating importer for repository [%s]' % repo_id)
        raise exceptions.MissingValue(['importer_config'])

    manager = manager_factory.repo_importer_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
                 action_tag('update_importer')]
    request = CallRequest(manager.update_importer_config,
                          [repo_id],
                          {'importer_config': importer_config},
                          tags=task_tags,
                          archive=True,
                          kwarg_blacklist=['importer_config'])
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    return self.ok(execution.execute(request))
def PUT(self, consumer_id, schedule_id):
    """
    Update a consumer's scheduled unit uninstall.

    'units' and 'options' are peeled off the request body; whatever remains
    is passed through as schedule data.
    """
    # 404 if the consumer doesn't exist
    managers.consumer_manager().get_consumer(consumer_id)

    body = self.params()
    units = body.pop('units', None)
    uninstall_options = None
    if 'options' in body:
        uninstall_options = {'options': body.pop('options')}

    schedule_manager = managers.schedule_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                 resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
                 action_tag('update_unit_uninstall_schedule')]
    request = CallRequest(schedule_manager.update_unit_uninstall_schedule,
                          [consumer_id, schedule_id, units, uninstall_options, body],
                          tags=task_tags,
                          archive=True)
    request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
    request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(request)

    # return the updated schedule in serialized form
    updated = dispatch_factory.scheduler().get(schedule_id)
    doc = serialization.dispatch.scheduled_unit_management_obj(updated)
    doc.update(serialization.link.current_link_obj())
    return self.ok(doc)
def POST(self, consumer_group_id):
    """
    Unassociate consumers matching the submitted criteria from the group
    and return the matched consumers.
    """
    body = self.params()
    criteria = Criteria.from_client_input(body.get('criteria', {}))
    group_manager = managers_factory.consumer_group_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id),
                 action_tag('consumer_group_unassociate')]
    request = CallRequest(group_manager.unassociate,
                          [consumer_group_id, criteria],
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id)
    return self.ok(execution.execute(request))
def PUT(self, consumer_group_id):
    """
    Update a consumer group with the fields given in the request body.
    """
    delta = self.params()
    group_manager = managers_factory.consumer_group_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id)]
    request = CallRequest(group_manager.update_consumer_group,
                          args=[consumer_group_id],
                          kwargs=delta,
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id)
    updated_group = execution.execute(request)
    updated_group.update(serialization.link.current_link_obj())
    return self.ok(updated_group)
def distributor_update_itinerary(repo_id, distributor_id, config):
    """
    Get the itinerary for updating a repository distributor.
      1. Update the distributor on the server.
      2. (re)bind any bound consumers.
    @param repo_id: A repository ID.
    @type repo_id: str
    @param distributor_id: The ID of the distributor to update.
    @type distributor_id: str
    @param config: The new distributor configuration.
    @type config: dict
    @return: A list of call_requests known as an itinerary.
    @rtype: list
    """
    call_requests = []
    # update the distributor
    manager = managers.repo_distributor_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            action_tag('update_distributor')]
    # NOTE(review): kwarg_blacklist presumably keeps the raw distributor
    # config out of the archived call request — confirm against CallRequest docs.
    update_request = CallRequest(manager.update_distributor_config,
                                 [repo_id, distributor_id],
                                 {'distributor_config': config},
                                 tags=tags,
                                 archive=True,
                                 kwarg_blacklist=['distributor_config'])
    update_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    update_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    call_requests.append(update_request)
    # append (re)bind itineraries for each consumer bound to the distributor
    options = {}
    manager = managers.consumer_bind_manager()
    for bind in manager.find_by_distributor(repo_id, distributor_id):
        bind_requests = bind_itinerary(
            bind['consumer_id'],
            bind['repo_id'],
            bind['distributor_id'],
            bind['notify_agent'],
            bind['binding_config'],
            options)
        if bind_requests:
            # rebinding must not start until the distributor update completes
            bind_requests[0].depends_on(update_request.id)
            call_requests.extend(bind_requests)
    return call_requests
def DELETE(self, role_id, login):
    """
    Remove the given user (by login) from the role.
    """
    role_manager = managers.role_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
                 action_tag('remove_user_from_role')]
    request = CallRequest(role_manager.remove_user_from_role,
                          [role_id, login],
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_USER_TYPE, login)
    request.reads_resource(dispatch_constants.RESOURCE_ROLE_TYPE, role_id)
    result = execution.execute_sync(request)
    return self.ok(result)
def POST(self, repo_id):
    """
    Asynchronously sync the repository, then publish it through every
    auto-publish distributor. Each publish call request depends on the
    sync request, so publishes only run after the sync.
    """
    # TODO: Add timeout support
    # Params
    params = self.params()
    overrides = params.get("override_config", None)
    # Execute the sync asynchronously
    repo_sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint("tasks", "sync_weight")
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag("sync")]
    sync_call_request = CallRequest(
        repo_sync_manager.sync,
        [repo_id],
        {"sync_config_override": overrides},
        weight=sync_weight,
        tags=sync_tags,
        archive=True,
    )
    sync_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    sync_call_request.add_life_cycle_callback(
        dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, repo_sync_manager.prep_sync
    )
    call_requests = [sync_call_request]
    # build one publish request per auto-publish distributor
    repo_publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag("auto_publish"),
        action_tag("publish"),
    ]
    auto_distributors = repo_publish_manager.auto_distributors(repo_id)
    for distributor in auto_distributors:
        distributor_id = distributor["id"]
        publish_call_request = CallRequest(
            repo_publish_manager.publish, [repo_id, distributor_id], tags=auto_publish_tags, archive=True
        )
        publish_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_call_request.add_life_cycle_callback(
            dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, repo_publish_manager.prep_publish
        )
        # publishes must wait for the sync to finish
        publish_call_request.depends_on(sync_call_request)
        call_requests.append(publish_call_request)
    # this raises an exception that is handled by the middleware,
    # so no return is needed
    execution.execute_multiple(call_requests)
def PUT(self, repo_group_id):
    """
    Update a repository group and return the updated group with a self link.
    """
    delta = self.params()
    group_manager = managers_factory.repo_group_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)]
    request = CallRequest(group_manager.update_repo_group,
                          args=[repo_group_id],
                          kwargs=delta,
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)
    updated = execution.execute(request)
    updated.update(serialization.link.current_link_obj())
    return self.ok(updated)
def POST(self, dest_repo_id): # Params params = self.params() source_repo_id = params.get('source_repo_id', None) overrides = params.get('override_config', None) if source_repo_id is None: raise exceptions.MissingValue(['source_repo_id']) # A 404 only applies to things in the URL, so the destination repo # check allows the MissingResource to bubble up, but if the source # repo doesn't exist, it's considered bad data. repo_query_manager = manager_factory.repo_query_manager() repo_query_manager.get_repository(dest_repo_id) try: repo_query_manager.get_repository(source_repo_id) except exceptions.MissingResource: raise exceptions.InvalidValue(['source_repo_id']) criteria = params.get('criteria', None) if criteria is not None: try: criteria = UnitAssociationCriteria.from_client_input(criteria) except: _LOG.error('Error parsing association criteria [%s]' % criteria) raise exceptions.PulpDataException(), None, sys.exc_info()[2] association_manager = manager_factory.repo_unit_association_manager() tags = [ resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, dest_repo_id), resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, source_repo_id), action_tag('associate') ] call_request = CallRequest( association_manager.associate_from_repo, [source_repo_id, dest_repo_id], { 'criteria': criteria, 'import_config_override': overrides }, tags=tags, archive=True, kwarg_blacklist=['criteria', 'import_config_override']) call_request.reads_resource( dispatch_constants.RESOURCE_REPOSITORY_TYPE, source_repo_id) call_request.updates_resource( dispatch_constants.RESOURCE_REPOSITORY_TYPE, dest_repo_id) return execution.execute_async(self, call_request)
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.

    @param repo_id: id of the repository to create a sync call request list for
    @type repo_id: str
    @param overrides: dictionary of configuration overrides for this sync
    @type overrides: dict or None
    @return: list of call request instances
    @rtype: list
    """
    repo_sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('sync')
    ]
    # NOTE(review): unlike the other definition of this itinerary in this
    # file, no CALL_ENQUEUE life-cycle callbacks (prep_sync/prep_publish)
    # are registered here -- confirm the omission is intentional.
    sync_call_request = CallRequest(repo_sync_manager.sync, [repo_id],
                                    {'sync_config_override': overrides},
                                    weight=sync_weight,
                                    tags=sync_tags,
                                    archive=True)
    sync_call_request.updates_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_requests = [sync_call_request]
    repo_publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('auto_publish'),
        action_tag('publish')
    ]
    auto_distributors = repo_publish_manager.auto_distributors(repo_id)
    for distributor in auto_distributors:
        distributor_id = distributor['id']
        publish_call_request = CallRequest(repo_publish_manager.publish,
                                           [repo_id, distributor_id],
                                           tags=auto_publish_tags,
                                           archive=True)
        publish_call_request.updates_resource(
            dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        # each auto publish waits until the sync reaches the finished state
        publish_call_request.depends_on(
            sync_call_request.id, [dispatch_constants.CALL_FINISHED_STATE])
        call_requests.append(publish_call_request)
    return call_requests
def POST(self, repo_group_id):
    """
    Unassociate repositories matching the submitted criteria from the group
    and return the group's remaining repo ids.
    """
    client_criteria = self.params().get('criteria', {})
    criteria = Criteria.from_client_input(client_criteria)
    group_manager = managers_factory.repo_group_manager()
    task_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
        action_tag('repo_group_unassociate'),
    ]
    request = CallRequest(group_manager.unassociate,
                          [repo_group_id, criteria],
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)
    execution.execute(request)
    group = RepoGroup.get_collection().find_one({'id': repo_group_id})
    return self.ok(group['repo_ids'])
def POST(self, consumer_group_id):
    """
    Associate consumers matching the submitted criteria with the group and
    return the group's consumer ids.
    """
    criteria = Criteria.from_client_input(self.params().get('criteria', {}))
    group_manager = managers_factory.consumer_group_manager()
    task_tags = [
        resource_tag(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id),
        action_tag('consumer_group_associate'),
    ]
    request = CallRequest(group_manager.associate,
                          [consumer_group_id, criteria],
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id)
    execution.execute(request)
    updated_group = ConsumerGroup.get_collection().find_one({'id': consumer_group_id})
    return self.ok(updated_group['consumer_ids'])
def POST(self, repo_group_id):
    """
    Remove repositories matched by the request's criteria from the group;
    respond with the ids of the repositories still in the group.
    """
    raw_criteria = self.params().get('criteria', {})
    unassociate_criteria = Criteria.from_client_input(raw_criteria)
    group_manager = managers_factory.repo_group_manager()
    call_request = CallRequest(
        group_manager.unassociate,
        [repo_group_id, unassociate_criteria],
        tags=[
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
            action_tag('repo_group_unassociate'),
        ])
    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id)
    execution.execute(call_request)
    group_doc = RepoGroup.get_collection().find_one({'id': repo_group_id})
    return self.ok(group_doc['repo_ids'])
def POST(self, consumer_group_id):
    """
    Unassociate consumers matching the submitted criteria from the group and
    return the group's remaining consumer ids.
    """
    criteria = Criteria.from_client_input(self.params().get('criteria', {}))
    group_manager = managers_factory.consumer_group_manager()
    task_tags = [
        resource_tag(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id),
        action_tag('consumer_group_unassociate'),
    ]
    request = CallRequest(group_manager.unassociate,
                          [consumer_group_id, criteria],
                          tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_CONSUMER_GROUP_TYPE, consumer_group_id)
    execution.execute(request)
    # the group is read back through the query manager here
    query_manager = managers_factory.consumer_group_query_manager()
    group = query_manager.get_group(consumer_group_id)
    return self.ok(group['consumer_ids'])
def DELETE(self, repo_id, importer_id):
    """
    Remove the importer from the repository and return the call result.
    """
    importer_manager = manager_factory.repo_importer_manager()
    task_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
        action_tag('delete_importer'),
    ]
    request = CallRequest(importer_manager.remove_importer,
                          [repo_id],
                          tags=task_tags,
                          archive=True)
    request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    request.deletes_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    return self.ok(execution.execute(request))
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.

    @param repo_id: id of the repository to create a sync call request list for
    @type repo_id: str
    @param overrides: dictionary of configuration overrides for this sync
    @type overrides: dict or None
    @return: list of call request instances
    @rtype: list
    """
    repo_sync_manager = manager_factory.repo_sync_manager()
    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag('sync')]
    sync_call_request = CallRequest(repo_sync_manager.sync, [repo_id], {'sync_config_override': overrides}, weight=sync_weight, tags=sync_tags, archive=True)
    sync_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    # prep_sync runs at enqueue time for the sync call
    sync_call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, repo_sync_manager.prep_sync)
    call_requests = [sync_call_request]
    repo_publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag('auto_publish'), action_tag('publish')]
    auto_distributors = repo_publish_manager.auto_distributors(repo_id)
    for distributor in auto_distributors:
        distributor_id = distributor['id']
        publish_call_request = CallRequest(repo_publish_manager.publish, [repo_id, distributor_id], tags=auto_publish_tags, archive=True)
        publish_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        # prep_publish runs at enqueue time for each publish call
        publish_call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, repo_publish_manager.prep_publish)
        # each auto publish waits until the sync reaches the finished state
        publish_call_request.depends_on(sync_call_request.id, [dispatch_constants.CALL_FINISHED_STATE])
        call_requests.append(publish_call_request)
    return call_requests
def distributor_delete_itinerary(repo_id, distributor_id):
    """
    Get the itinerary for deleting a repository distributor.
      1. Delete the distributor on the server.
      2. Unbind any bound consumers.
    @param repo_id: A repository ID.
    @type repo_id: str
    @param distributor_id: The ID of the distributor to delete.
    @type distributor_id: str
    @return: A list of call_requests known as an itinerary.
    @rtype list
    """
    call_requests = []
    # delete distributor
    manager = managers.repo_distributor_manager()
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
        action_tag('remove_distributor')
    ]
    delete_request = CallRequest(
        manager.remove_distributor,
        [repo_id, distributor_id],
        tags=tags,
        archive=True)
    delete_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    delete_request.deletes_resource(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id)
    call_requests.append(delete_request)
    # append unbind itineraries foreach bound consumer
    options = {}
    manager = managers.consumer_bind_manager()
    for bind in manager.find_by_distributor(repo_id, distributor_id):
        unbind_requests = unbind_itinerary(
            bind['consumer_id'],
            bind['repo_id'],
            bind['distributor_id'],
            options)
        if unbind_requests:
            # the first unbind in each itinerary waits on the distributor delete
            unbind_requests[0].depends_on(delete_request.id)
        call_requests.extend(unbind_requests)
    return call_requests
def PUT(self, id):
    """
    Apply the 'delta' from the request body to the consumer and return the
    updated consumer with a self link.
    """
    delta = self.params().get('delta')
    consumer_manager = managers.consumer_manager()
    task_tags = [
        resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, id),
        action_tag('update'),
    ]
    request = CallRequest(consumer_manager.update, [id, delta], tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, id)
    updated = execution.execute(request)
    updated.update(serialization.link.current_link_obj())
    return self.ok(updated)
def PUT(self, id):
    """
    Update consumer *id* with the 'delta' document from the request body.
    """
    body = self.params()
    delta = body.get('delta')
    manager = managers.consumer_manager()
    update_tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, id),
                   action_tag('update')]
    update_request = CallRequest(manager.update, [id, delta], tags=update_tags)
    update_request.updates_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, id)
    consumer = execution.execute(update_request)
    link = serialization.link.current_link_obj()
    consumer.update(link)
    return self.ok(consumer)
def POST(self, role_id):
    """
    Add a user to a role.

    @param role_id: id of the role to add the user to
    @type  role_id: str
    @raise InvalidValue: if 'login' is missing from the request body
    """
    # Params (validation will occur in the manager)
    params = self.params()
    login = params.get('login', None)
    if login is None:
        # Pass the offending property name as a list, matching the convention
        # used by the other handlers in this file (previously the None value
        # itself was passed, producing a useless error payload).
        raise exceptions.InvalidValue(['login'])
    role_manager = managers.role_manager()
    tags = [resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
            action_tag('add_user_to_role')]
    call_request = CallRequest(role_manager.add_user_to_role,
                               [role_id, login],
                               tags=tags)
    call_request.updates_resource(dispatch_constants.RESOURCE_USER_TYPE, login)
    return self.ok(execution.execute_sync(call_request))
def PUT(self, login):
    """
    Update the user identified by *login* with the request's 'delta' and
    return the updated user with a self link.
    """
    delta = self.params().get('delta', None)
    user_manager = managers.user_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_USER_TYPE, login),
                 action_tag('update')]
    request = CallRequest(user_manager.update_user, [login, delta], tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_USER_TYPE, login)
    updated = execution.execute(request)
    updated.update(serialization.link.current_link_obj())
    return self.ok(updated)
def PUT(self, repo_id, distributor_id, schedule_id):
    """
    Update an existing scheduled publish for the given repo/distributor.

    Raises MissingResource (handled as a 404) when the schedule is not
    registered with the distributor. The body's 'override_config' field is
    routed to the publish options; all remaining fields are treated as
    schedule updates. Returns the updated schedule with a self link.
    """
    distributor_manager = manager_factory.repo_distributor_manager()
    schedule_list = distributor_manager.list_publish_schedules(
        repo_id, distributor_id)
    if schedule_id not in schedule_list:
        raise exceptions.MissingResource(repo=repo_id,
                                         distributor=distributor_id,
                                         publish_schedule=schedule_id)
    publish_update = {}
    schedule_update = self.params()
    # 'override_config' belongs to the publish call, not the schedule itself
    if 'override_config' in schedule_update:
        publish_update['override_config'] = schedule_update.pop(
            'override_config')
    schedule_manager = manager_factory.schedule_manager()
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
            distributor_id),
        resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
        action_tag('update_publish_schedule')
    ]
    call_request = CallRequest(schedule_manager.update_publish_schedule, [
        repo_id, distributor_id, schedule_id, publish_update, schedule_update
    ],
                               tags=tags,
                               archive=True)
    # repo and distributor are only read; the schedule resource is changed
    call_request.reads_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.reads_resource(
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
        distributor_id)
    call_request.updates_resource(
        dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)
    execution.execute(call_request)
    # read the schedule back from the scheduler for the response body
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.current_link_obj())
    return self.ok(obj)
def DELETE(self, repo_id, importer_id):
    """
    Remove the configured importer from the given repository.
    """
    manager = manager_factory.repo_importer_manager()
    delete_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
        action_tag('delete_importer'),
    ]
    delete_request = CallRequest(manager.remove_importer,
                                 [repo_id],
                                 tags=delete_tags,
                                 archive=True)
    # the repo is modified and the importer resource goes away
    delete_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    delete_request.deletes_resource(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    outcome = execution.execute(delete_request)
    return self.ok(outcome)
def PUT(self, role_id):
    """
    Update the role with the 'delta' supplied in the request body and return
    the updated role with a self link.
    """
    delta = self.params().get('delta', None)
    role_manager = managers.role_manager()
    task_tags = [resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
                 action_tag('update')]
    request = CallRequest(role_manager.update_role, [role_id, delta], tags=task_tags)
    request.updates_resource(dispatch_constants.RESOURCE_ROLE_TYPE, role_id)
    updated_role = execution.execute(request)
    updated_role.update(serialization.link.current_link_obj())
    return self.ok(updated_role)
def PUT(self, login):
    """
    Apply a delta update to the given user account and return the result.
    """
    user_data = self.params()
    delta = user_data.get('delta', None)
    manager = managers.user_manager()
    update_tags = [
        resource_tag(dispatch_constants.RESOURCE_USER_TYPE, login),
        action_tag('update'),
    ]
    update_request = CallRequest(manager.update_user, [login, delta], tags=update_tags)
    update_request.updates_resource(dispatch_constants.RESOURCE_USER_TYPE, login)
    updated_user = execution.execute(update_request)
    link = serialization.link.current_link_obj()
    updated_user.update(link)
    return self.ok(updated_user)
def POST(self, repo_id):
    """
    Add a distributor to the repository and respond with the created
    distributor (201).

    Optional body fields: distributor_type_id, distributor_config,
    distributor_id, auto_publish. Validation of the values happens in the
    manager, not here.
    """
    # Params (validation will occur in the manager)
    params = self.params()
    distributor_type = params.get('distributor_type_id', None)
    distributor_config = params.get('distributor_config', None)
    distributor_id = params.get('distributor_id', None)
    auto_publish = params.get('auto_publish', False)
    # Update the repo
    distributor_manager = manager_factory.repo_distributor_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('add_distributor')
    ]
    # only tag/declare the distributor resource when the caller named one
    if distributor_id is not None:
        tags.append(
            resource_tag(
                dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
                distributor_id))
    # NOTE(review): repo_plugin_config is blacklisted from the call's stored
    # kwargs -- presumably because plugin config may carry sensitive values;
    # confirm against CallRequest's kwarg_blacklist semantics.
    call_request = CallRequest(
        distributor_manager.add_distributor, [repo_id, distributor_type], {
            'repo_plugin_config': distributor_config,
            'auto_publish': auto_publish,
            'distributor_id': distributor_id
        },
        weight=weight,
        tags=tags,
        kwarg_blacklist=['repo_plugin_config'])
    call_request.updates_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    if distributor_id is not None:
        call_request.creates_resource(
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
            distributor_id)
    return execution.execute_created(self, call_request, distributor_id)
def POST(self, repo_id, distributor_id):
    """
    Create a new scheduled publish for the repo/distributor pair and respond
    201 with the created schedule.

    The body's 'override_config' is split out into the publish options; all
    remaining fields configure the schedule itself.
    """
    distributor_manager = manager_factory.repo_distributor_manager()
    # existence check; raises if the distributor is not attached to the repo
    distributor_manager.get_distributor(repo_id, distributor_id)
    schedule_options = self.params()
    publish_options = {
        'override_config': schedule_options.pop('override_config', {})
    }
    schedule_manager = manager_factory.schedule_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
            distributor_id),
        action_tag('create_publish_schedule')
    ]
    call_request = CallRequest(
        schedule_manager.create_publish_schedule,
        [repo_id, distributor_id, publish_options, schedule_options],
        weight=weight,
        tags=tags,
        archive=True)
    # the repo is only read; the distributor gains the new schedule
    call_request.reads_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(
        dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE,
        distributor_id)
    schedule_id = execution.execute_sync(call_request)
    # read the schedule back from the scheduler for the response body
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_publish_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)
def POST(self, repo_id, importer_id):
    """
    Create a new scheduled sync for the repo/importer pair and respond 201
    with the created schedule.

    The body's 'override_config' is split out into the sync options; all
    remaining fields configure the schedule itself.
    """
    importer_manager = manager_factory.repo_importer_manager()
    importer = importer_manager.get_importer(repo_id)
    # the URL's importer id must match the repo's actual importer
    if importer_id != importer['id']:
        raise exceptions.MissingResource(importer=importer_id)
    schedule_options = self.params()
    sync_options = {
        'override_config': schedule_options.pop('override_config', {})
    }
    schedule_manager = manager_factory.schedule_manager()
    weight = pulp_config.config.getint('tasks', 'create_weight')
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                     importer_id),
        action_tag('create_sync_schedule')
    ]
    call_request = CallRequest(
        schedule_manager.create_sync_schedule,
        [repo_id, importer_id, sync_options, schedule_options],
        weight=weight,
        tags=tags,
        archive=True)
    # the repo is only read; the importer gains the new schedule
    call_request.reads_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    call_request.updates_resource(
        dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id)
    schedule_id = execution.execute_sync(call_request)
    # read the schedule back from the scheduler for the response body
    scheduler = dispatch_factory.scheduler()
    schedule = scheduler.get(schedule_id)
    obj = serialization.dispatch.scheduled_sync_obj(schedule)
    obj.update(serialization.link.child_link_obj(schedule_id))
    return self.created(obj['_href'], obj)