Example #1
def _get_repo_tasks(context, repo_id, action):
    """
    Retrieve a list of incomplete Task objects for the given repo_id and action. action must be one
    of 'sync', 'download', or 'publish'.

    :param context: The CLI context from Okaara
    :type  context: pulp.client.extensions.core.ClientContext
    :param repo_id: The primary key of the repository you wish to limit the Task query to
    :type  repo_id: basestring
    :param action:  One of "sync", "download", or "publish"
    :type  action:  basestring
    :return:        A list of Task objects
    :rtype:         list
    """
    repo_tag = tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id)
    if action == 'publish':
        action_tag = tags.action_tag(tags.ACTION_PUBLISH_TYPE)
    elif action == 'sync':
        action_tag = tags.action_tag(tags.ACTION_SYNC_TYPE)
    elif action == 'download':
        action_tag = tags.action_tag(tags.ACTION_DOWNLOAD_TYPE)
    else:
        raise ValueError(
            '_get_repo_tasks() does not support %(action)s as an action.' % {'action': action})
    repo_search_criteria = {'filters': {'state': {'$nin': responses.COMPLETED_STATES},
                                        'tags': {'$all': [repo_tag, action_tag]}}}
    return context.server.tasks_search.search(**repo_search_criteria)
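
For orientation, this is roughly the search filter the helper ends up sending for a hypothetical repo and the 'sync' action. The literal tag strings and the COMPLETED_STATES stand-in are assumptions, based on the 'pulp:repository:...' / 'pulp:action:sync' values visible in the test data of Example #40 below.

# Sketch only: 'zoo' is a hypothetical repo id; the tag strings mimic what
# tags.resource_tag()/tags.action_tag() are expected to return, and the state
# list stands in for pulp.bindings.responses.COMPLETED_STATES.
COMPLETED_STATES = ['finished', 'error', 'canceled']

repo_tag = 'pulp:repository:zoo'
sync_tag = 'pulp:action:sync'
repo_search_criteria = {'filters': {'state': {'$nin': COMPLETED_STATES},
                                    'tags': {'$all': [repo_tag, sync_tag]}}}
# _get_repo_tasks() then passes this straight through to
# context.server.tasks_search.search(**repo_search_criteria).
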
Example #2
def _get_repo_tasks(context, repo_id, action):
    """
    Retrieve a list of incomplete Task objects for the given repo_id and action. action must be one
    of 'sync' or 'publish'.

    :param context: The CLI context from Okaara
    :type  context: pulp.client.extensions.core.ClientContext
    :param repo_id: The primary key of the repository you wish to limit the Task query to
    :type  repo_id: basestring
    :param action:  One of "sync" or "publish"
    :type  action:  basestring
    :return:        A list of Task objects
    :rtype:         list
    """
    repo_tag = tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id)
    if action == 'publish':
        action_tag = tags.action_tag(tags.ACTION_PUBLISH_TYPE)
    elif action == 'sync':
        action_tag = tags.action_tag(tags.ACTION_SYNC_TYPE)
    else:
        raise ValueError(
            '_get_repo_tasks() does not support %(action)s as an action.' %
            {'action': action})
    repo_search_criteria = {
        'filters': {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [repo_tag, action_tag]
            }
        }
    }
    return context.server.tasks_search.search(**repo_search_criteria)
Example #3
File: cud.py Project: ehelms/pulp
    def create_unit_install_schedule(self, consumer_id, units, install_options, schedule_data):
        """
        Create a schedule for installing content units on a consumer.
        @param consumer_id: unique id for the consumer
        @param units: list of unit type and unit key dicts
        @param install_options: options to pass to the install manager
        @param schedule_data: scheduling data
        @return: schedule id
        """
        self._validate_consumer(consumer_id)
        self._validate_keys(install_options, _UNIT_INSTALL_OPTION_KEYS)
        if 'schedule' not in schedule_data:
            raise pulp_exceptions.MissingValue(['schedule'])

        manager = managers_factory.consumer_agent_manager()
        args = [consumer_id]
        kwargs = {'units': units,
                  'options': install_options.get('options', {})}
        weight = pulp_config.config.getint('tasks', 'consumer_content_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                action_tag('unit_install'), action_tag('scheduled_unit_install')]
        call_request = CallRequest(manager.install_content, args, kwargs, weight=weight, tags=tags, archive=True)
        call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)

        scheduler = dispatch_factory.scheduler()
        schedule_id = scheduler.add(call_request, **schedule_data)
        return schedule_id
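
A hedged call sketch for this method follows. Every value is hypothetical, and `schedule_manager` stands for an instance of the class this method belongs to; the only constraints visible above are that install_options uses recognized keys and that schedule_data carries a 'schedule' entry.

# Hypothetical values throughout; the unit dict shape and the ISO 8601-style
# interval are assumptions, not taken from the source above.
units = [{'type_id': 'rpm', 'unit_key': {'name': 'zsh'}}]
install_options = {'options': {}}
schedule_data = {'schedule': 'PT1H'}  # presence of 'schedule' is what is validated above
schedule_id = schedule_manager.create_unit_install_schedule(
    'consumer-1', units, install_options, schedule_data)
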
Example #4
    def POST(self, repo_id):

        # TODO: Add timeout support

        # Params
        params = self.params()
        overrides = params.get("override_config", None)

        # Execute the sync asynchronously
        repo_sync_manager = manager_factory.repo_sync_manager()

        sync_weight = pulp_config.config.getint("tasks", "sync_weight")
        sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag("sync")]

        sync_call_request = CallRequest(
            repo_sync_manager.sync,
            [repo_id],
            {"sync_config_override": overrides},
            weight=sync_weight,
            tags=sync_tags,
            archive=True,
        )
        sync_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        sync_call_request.add_life_cycle_callback(
            dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, repo_sync_manager.prep_sync
        )

        call_requests = [sync_call_request]

        repo_publish_manager = manager_factory.repo_publish_manager()
        auto_publish_tags = [
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
            action_tag("auto_publish"),
            action_tag("publish"),
        ]
        auto_distributors = repo_publish_manager.auto_distributors(repo_id)

        for distributor in auto_distributors:
            distributor_id = distributor["id"]
            publish_call_request = CallRequest(
                repo_publish_manager.publish, [repo_id, distributor_id], tags=auto_publish_tags, archive=True
            )
            publish_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
            publish_call_request.add_life_cycle_callback(
                dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK, repo_publish_manager.prep_publish
            )
            publish_call_request.depends_on(sync_call_request)

            call_requests.append(publish_call_request)

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        execution.execute_multiple(call_requests)
Example #5
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.
    @param repo_id: id of the repository to create a sync call request list for
    @type repo_id: str
    @param overrides: dictionary of configuration overrides for this sync
    @type overrides: dict or None
    @return: list of call request instances
    @rtype: list
    """

    repo_sync_manager = manager_factory.repo_sync_manager()

    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('sync')
    ]

    sync_call_request = CallRequest(repo_sync_manager.sync, [repo_id],
                                    {'sync_config_override': overrides},
                                    weight=sync_weight,
                                    tags=sync_tags,
                                    archive=True)
    sync_call_request.updates_resource(
        dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)

    call_requests = [sync_call_request]

    repo_publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('auto_publish'),
        action_tag('publish')
    ]
    auto_distributors = repo_publish_manager.auto_distributors(repo_id)

    for distributor in auto_distributors:
        distributor_id = distributor['id']
        publish_call_request = CallRequest(repo_publish_manager.publish,
                                           [repo_id, distributor_id],
                                           tags=auto_publish_tags,
                                           archive=True)
        publish_call_request.updates_resource(
            dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_call_request.depends_on(
            sync_call_request.id, [dispatch_constants.CALL_FINISHED_STATE])

        call_requests.append(publish_call_request)

    return call_requests
Example #6
File: bind.py Project: ashcrow/pulp
    def task_header(self, task):

        handlers = {
            tags.action_tag(tags.ACTION_BIND) : self._render_bind_header,
            tags.action_tag(tags.ACTION_AGENT_BIND) : self._render_agent_bind_header,
        }

        # There will be exactly 1 action tag for each task (multiple resource tags)
        action_tags = [t for t in task.tags if tags.is_action_tag(t)]
        action_tag = action_tags[0]

        handler = handlers[action_tag]
        handler()
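
The lookup above indexes the handler dict directly, so a task whose action tag has no registered renderer would raise KeyError. A defensive variant of the same dispatch-table pattern, sketched under that assumption with the handler names used above, could look like this:

    def task_header(self, task):
        handlers = {
            tags.action_tag(tags.ACTION_BIND): self._render_bind_header,
            tags.action_tag(tags.ACTION_AGENT_BIND): self._render_agent_bind_header,
        }
        # Pick out the single action tag, then do nothing if it is missing or
        # has no registered renderer.
        action_tags = [t for t in task.tags if tags.is_action_tag(t)]
        handler = handlers.get(action_tags[0]) if action_tags else None
        if handler is not None:
            handler()
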
Example #7
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
    """
    Create a call request list for the synchronization of a repository and the
    publishing of any distributors that are configured for auto publish.
    @param repo_id: id of the repository to create a sync call request list for
    @type repo_id: str
    @param overrides: dictionary of configuration overrides for this sync
    @type overrides: dict or None
    @return: list of call request instances
    @rtype: list
    """

    repo_sync_manager = manager_factory.repo_sync_manager()

    sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
    sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                 action_tag('sync')]

    sync_call_request = CallRequest(repo_sync_manager.sync,
                                    [repo_id],
                                    {'sync_config_override': overrides},
                                    weight=sync_weight,
                                    tags=sync_tags,
                                    archive=True)
    sync_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
    sync_call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                              repo_sync_manager.prep_sync)

    call_requests = [sync_call_request]

    repo_publish_manager = manager_factory.repo_publish_manager()
    auto_publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                         action_tag('auto_publish'), action_tag('publish')]
    auto_distributors = repo_publish_manager.auto_distributors(repo_id)

    for distributor in auto_distributors:
        distributor_id = distributor['id']
        publish_call_request = CallRequest(repo_publish_manager.publish,
                                           [repo_id, distributor_id],
                                           tags=auto_publish_tags,
                                           archive=True)
        publish_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
        publish_call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
                                                     repo_publish_manager.prep_publish)
        publish_call_request.depends_on(sync_call_request.id, [dispatch_constants.CALL_FINISHED_STATE])

        call_requests.append(publish_call_request)

    return call_requests
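
The itinerary function only builds the CallRequest list; it does not execute anything itself. A minimal sketch of how a caller hands that list to the dispatch layer, mirroring what the web controller in Example #4 does with execute_multiple() (the repo id is hypothetical):

call_requests = sync_with_auto_publish_itinerary('zoo', overrides=None)
# Hand the whole list to the coordinator, as Example #4 does from its POST
# handler; the publish requests only run after the sync request finishes,
# because of the depends_on() wiring above.
execution.execute_multiple(call_requests)
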
Example #8
    def task_header(self, task):

        handlers = {
            tags.action_tag(tags.ACTION_BIND):
            self._render_bind_header,
            tags.action_tag(tags.ACTION_AGENT_BIND):
            self._render_agent_bind_header,
        }

        # There will be exactly 1 action tag for each task (multiple resource tags)
        action_tags = [t for t in task.tags if tags.is_action_tag(t)]
        action_tag = action_tags[0]

        handler = handlers[action_tag]
        handler()
Example #9
    def post(self, request):
        """
        Creates an async task to regenerate content applicability data for the given consumers.

        body {consumer_criteria:<dict>}

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest

        :raises MissingValue: if some parameters are missing
        :raises InvalidValue: if some parameters are invalid
        :raises OperationPostponed: when an async operation is performed.
        """

        body = request.body_as_json
        consumer_criteria = body.get('consumer_criteria', None)
        if consumer_criteria is None:
            raise MissingValue('consumer_criteria')
        try:
            consumer_criteria = Criteria.from_client_input(consumer_criteria)
        except:
            raise InvalidValue('consumer_criteria')

        task_tags = [tags.action_tag('content_applicability_regeneration')]
        async_result = regenerate_applicability_for_consumers.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_PROFILE_APPLICABILITY_TYPE,
            tags.RESOURCE_ANY_ID, (consumer_criteria.as_dict(), ),
            tags=task_tags)
        raise OperationPostponed(async_result)
Example #10
    def POST(self):

        # Params
        params = self.params()
        login = params.get('login', None)
        resource = params.get('resource', None)
        operation_names = params.get('operations', None)

        _check_invalid_params({
            'login': login,
            'resource': resource,
            'operation_names': operation_names
        })

        operations = _get_operations(operation_names)

        # Grant permission synchronously
        permission_manager = managers.permission_manager()
        tags = [
            resource_tag(dispatch_constants.RESOURCE_PERMISSION_TYPE,
                         resource),
            resource_tag(dispatch_constants.RESOURCE_USER_TYPE, login),
            action_tag('grant_permission_to_user')
        ]

        call_request = CallRequest(permission_manager.grant,
                                   [resource, login, operations],
                                   tags=tags)
        call_request.reads_resource(dispatch_constants.RESOURCE_USER_TYPE,
                                    login)
        call_request.updates_resource(
            dispatch_constants.RESOURCE_PERMISSION_TYPE, resource)

        return self.ok(execution.execute_sync(call_request))
Example #11
def queue_download_deferred():
    """
    Queue a task to download all content units with entries in the DeferredDownload
    collection.
    """
    tags = [pulp_tags.action_tag(pulp_tags.ACTION_DEFERRED_DOWNLOADS_TYPE)]
    return download_deferred.apply_async(tags=tags)
Example #12
    def test_run(self, mock_search, mock_poll):
        """
        Test the run() method when there is one publish Task. It should call poll() on it.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id}
        # Simulate a task already running
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task_data['state'] = 'running'
        task = responses.Task(task_data)
        mock_search.return_value = [task]

        self.command.run(**data)

        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_PUBLISH_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        mock_poll.assert_called_once_with([task], data)
Example #13
    def test_bind(self, *mocks):

        mock_agent = mocks[0]
        mock_context = mocks[1]
        mock_factory = mocks[2]
        mock_bindings = mocks[3]
        mock_task_status = mocks[4]
        mock_uuid = mocks[5]

        consumer = {'id': '1234'}
        mock_consumer_manager = Mock()
        mock_consumer_manager.get_consumer = Mock(return_value=consumer)
        mock_factory.consumer_manager = Mock(return_value=mock_consumer_manager)

        binding = {}
        mock_bind_manager = Mock()
        mock_bind_manager.get_bind = Mock(return_value=binding)
        mock_bind_manager.action_pending = Mock()
        mock_factory.consumer_bind_manager = Mock(return_value=mock_bind_manager)

        agent_bindings = []
        mock_bindings.return_value = agent_bindings

        task_id = '2345'
        mock_context.return_value = {}
        mock_uuid.return_value = task_id

        # test manager

        repo_id = '100'
        distributor_id = '200'
        options = {}
        agent_manager = AgentManager()
        agent_manager.bind(consumer['id'], repo_id, distributor_id, options)

        # validations

        task_tags = [
            tags.resource_tag(tags.RESOURCE_CONSUMER_TYPE, consumer['id']),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            tags.action_tag(tags.ACTION_AGENT_BIND)
        ]

        mock_consumer_manager.get_consumer.assert_called_with(consumer['id'])
        mock_bind_manager.get_bind.assert_called_with(consumer['id'], repo_id, distributor_id)
        mock_bindings.assert_called_with([binding])

        mock_context.assert_called_with(
            consumer,
            task_id=task_id,
            action='bind',
            consumer_id=consumer['id'],
            repo_id=repo_id,
            distributor_id=distributor_id)

        mock_task_status.assert_called_with(task_id, 'agent', tags=task_tags)
        mock_agent.bind.assert_called_with(mock_context.return_value, agent_bindings, options)
        mock_bind_manager.action_pending.assert_called_with(
            consumer['id'], repo_id, distributor_id, Bind.Action.BIND, task_id)
Example #14
    def test_task_header_action_tag_only(self):
        task = Task({})
        task.tags = [tags.action_tag(tags.ACTION_UPDATE_DISTRIBUTOR)]

        self.command.task_header(task)

        self.assertEqual(self.prompt.get_write_tags(), [tags.ACTION_UPDATE_DISTRIBUTOR])
Example #15
    def POST(self, repo_id):
        """
        Import an uploaded unit into the given repository.

        :param repo_id: The id of the repository the upload should be imported into
        :type  repo_id: basestring
        :return:        A json serialized dictionary with two keys. 'success_flag' indexes a boolean
                        value that indicates whether the import was successful, and 'summary' will
                        contain the summary as reported by the Importer.
        :rtype:         basestring
        """
        # Collect user input
        params = self.params()
        upload_id = params['upload_id']
        unit_type_id = params['unit_type_id']
        unit_key = params['unit_key']
        unit_metadata = params.pop('unit_metadata', None)
        override_config = params.pop('override_config', None)

        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.action_tag('import_upload')
        ]
        async_result = import_uploaded_unit.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE,
            repo_id, [
                repo_id, unit_type_id, unit_key, unit_metadata, upload_id,
                override_config
            ],
            tags=task_tags)
        raise exceptions.OperationPostponed(async_result)
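
For illustration, a request body consistent with the parameters this handler reads might look like the following dict (all values hypothetical; unit_metadata and override_config are optional, since they are popped with defaults above):

example_body = {
    'upload_id': 'abc123',                          # identifier of a previously created upload
    'unit_type_id': 'rpm',                          # hypothetical unit type
    'unit_key': {'name': 'zsh', 'version': '5.0'},  # hypothetical unit key
    'unit_metadata': {},
    'override_config': {},
}
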
Example #16
    def POST(self, repo_id):

        params = self.params()
        criteria = params.get('criteria', None)

        if criteria is not None:
            try:
                criteria = UnitAssociationCriteria.from_client_input(criteria)
            except:
                _logger.error('Error parsing unassociation criteria [%s]' %
                              criteria)
                raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.action_tag('unassociate')
        ]
        async_result = unassociate_by_criteria.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE,
            repo_id, [
                repo_id, criteria, RepoContentUnit.OWNER_TYPE_USER,
                manager_factory.principal_manager().get_principal()['login']
            ],
            tags=task_tags)
        raise exceptions.OperationPostponed(async_result)
Example #17
    def PUT(self, repo_id, importer_id):
        # Raise a MissingResource exception if the repo or the importer doesn't exist
        importer_manager = manager_factory.repo_importer_manager()
        importer = importer_manager.get_importer(repo_id)
        if importer['id'] != importer_id:
            raise exceptions.MissingResource(importer_id=importer_id)

        if not plugin_api.is_valid_importer(importer_id):
            raise exceptions.PulpCodedValidationException(
                error_code=error_codes.PLP1008)

        params = self.params()
        importer_config = params.get('importer_config', None)

        if importer_config is None:
            _logger.error(
                'Missing configuration updating importer for repository [%s]' %
                repo_id)
            raise exceptions.MissingValue(['importer_config'])

        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_IMPORTER_TYPE,
                              importer_id),
            tags.action_tag('update_importer')
        ]
        async_result = update_importer_config.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE,
            repo_id, [repo_id], {'importer_config': importer_config},
            tags=task_tags)
        raise exceptions.OperationPostponed(async_result)
Example #18
    def POST(self, repo_id):
        """
        Associate an importer with a repository.

        This will validate that the repository exists and that there is an importer with the
        importer_type_id given. However, the importer configuration validation only checks the
        provided values against a standard set of importer configuration keys. The importer
        specific validation is called on association, so any type specific configuration will
        be validated later. This means the spawned task could fail with a validation error.

        :param repo_id: the repository to associate the importer with
        :type  repo_id: str
        """
        params = self.params()
        importer_type = params.get('importer_type_id', None)
        config = params.get('importer_config', None)

        # This call will raise the appropriate exception
        importer_manager = manager_factory.repo_importer_manager()
        importer_manager.validate_importer_config(repo_id, importer_type,
                                                  config)

        # Note: If an importer exists, it's removed, so no need to handle 409s.
        # Note: If the plugin raises an exception during initialization, let it
        #  bubble up and be handled like any other 500.

        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.action_tag('add_importer')
        ]
        async_result = set_importer.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE,
            repo_id, [repo_id, importer_type], {'repo_plugin_config': config},
            tags=task_tags)
        raise exceptions.OperationPostponed(async_result)
Example #19
def publish_itinerary(repo_id, distributor_id, overrides=None):
    """
    Create an itinerary for repo publish.
    @param repo_id: id of the repo to publish
    @type repo_id: str
    @param distributor_id: id of the distributor to use for the repo publish
    @type distributor_id: str
    @param overrides: dictionary of options to pass to the publish manager
    @type overrides: dict or None
    @return: list of call requests
    @rtype: list
    """

    repo_publish_manager = manager_factory.repo_publish_manager()
    weight = pulp_config.config.getint('tasks', 'publish_weight')
    tags = [
        resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('publish')
    ]

    call_request = CallRequest(repo_publish_manager.publish,
                               [repo_id, distributor_id],
                               {'publish_config_override': overrides},
                               weight=weight,
                               tags=tags,
                               archive=True)

    call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE,
                                  repo_id)

    return [call_request]
Example #20
    def PUT(self, consumer_id, content_type):
        """
        Update the association of a profile with a consumer by content type ID.
        @param consumer_id: A consumer ID.
        @type consumer_id: str
        @param content_type: A content unit type ID.
        @type content_type: str
        @return: The updated model object:
            {consumer_id:<str>, content_type:<str>, profile:<dict>}
        @rtype: dict
        """
        body = self.params()
        profile = body.get('profile')

        manager = managers.consumer_profile_manager()
        tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                resource_tag(dispatch_constants.RESOURCE_CONTENT_UNIT_TYPE, content_type),
                action_tag('profile_update')]

        call_request = CallRequest(manager.update,
                                   [consumer_id, content_type],
                                   {'profile': profile},
                                   tags=tags,
                                   weight=0,
                                   kwarg_blacklist=['profile'])
        call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)

        call_report = CallReport.from_call_request(call_request)
        call_report.serialize_result = False

        consumer = execution.execute_sync(call_request, call_report)
        link = serialization.link.child_link_obj(consumer_id, content_type)
        consumer.update(link)

        return self.ok(consumer)
Example #21
    def POST(self):
        body = self.params()
        id = body.get('id')
        display_name = body.get('display_name')
        description = body.get('description')
        notes = body.get('notes')

        manager = managers.consumer_manager()
        args = [id, display_name, description, notes]
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, id),
                action_tag('create')]

        call_request = CallRequest(manager.register,
                                   args,
                                   weight=weight,
                                   tags=tags)
        call_request.creates_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, id)

        call_report = CallReport.from_call_request(call_request)
        call_report.serialize_result = False

        consumer = execution.execute_sync(call_request, call_report)
        consumer.update({'_href': serialization.link.child_link_obj(consumer['id'])})
        return self.created(consumer['_href'], consumer)
Example #22
def queue_sync_with_auto_publish(repo_id,
                                 overrides=None,
                                 scheduled_call_id=None):
    """
    Sync a repository and upon successful completion, publish any distributors that are configured
    for auto publish.

    :param repo_id: id of the repository to create a sync call request list for
    :type repo_id:  str
    :param overrides: dictionary of configuration overrides for this sync
    :type overrides:  dict or None
    :param scheduled_call_id: id of scheduled call that dispatched this task
    :type  scheduled_call_id: str

    :return: result containing the details of the task executed and any spawned tasks
    :rtype:  pulp.server.async.tasks.TaskResult
    """
    kwargs = {
        'repo_id': repo_id,
        'sync_config_override': overrides,
        'scheduled_call_id': scheduled_call_id
    }
    tags = [
        resource_tag(RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('sync')
    ]
    result = sync.apply_async_with_reservation(RESOURCE_REPOSITORY_TYPE,
                                               repo_id,
                                               tags=tags,
                                               kwargs=kwargs)
    return result
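
A hedged usage sketch follows. The repo id and the override key are hypothetical; apply_async_with_reservation() is used above to reserve the repository, which is meant to keep concurrent tasks on the same repo from running at the same time.

# Hypothetical call; the contents of `overrides` depend on the importer in use.
result = queue_sync_with_auto_publish('zoo', overrides={'num_threads': 2})
# `result` is the TaskResult described in the docstring above, covering the
# dispatched sync task and any publish tasks spawned on success.
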
Example #23
    def test_run_already_in_progress(self, mock_sync, mock_search, poll):
        """
        Test the run() method when there is an existing sync Task on the server.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id, polling.FLAG_BACKGROUND.keyword: False,
                sp.FLAG_FORCE_FULL_SYNC.keyword: False}
        # Simulate a task already running
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task_data['state'] = 'running'
        task = responses.Task(task_data)
        mock_search.return_value = [task]

        self.command.run(**data)

        self.assertEqual(mock_sync.call_count, 0)
        sync_tasks = poll.mock_calls[0][1][0]
        expected_search_query = {
            'state': {'$nin': responses.COMPLETED_STATES},
            'tags': {'$all': [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                              tags.action_tag(tags.ACTION_SYNC_TYPE)]}}
        mock_search.assert_called_once_with(filters=expected_search_query)
        poll.assert_called_once_with(sync_tasks, data)
        write_tags = self.prompt.get_write_tags()
        self.assertEqual(2, len(write_tags))
        self.assertEqual(write_tags[1], 'in-progress')
Example #24
def queue_publish(repo_id,
                  distributor_id,
                  overrides=None,
                  scheduled_call_id=None):
    """
    Queue a repo publish task.

    :param repo_id: id of the repo to publish
    :type  repo_id: str
    :param distributor_id: publish the repo with this distributor
    :type  distributor_id: str
    :param overrides: dictionary of options to pass to the publish task
    :type  overrides: dict or None
    :param scheduled_call_id: id of scheduled call that dispatched this task
    :type  scheduled_call_id: str

    :return: task result object
    :rtype: pulp.server.async.tasks.TaskResult
    """
    kwargs = {
        'repo_id': repo_id,
        'dist_id': distributor_id,
        'publish_config_override': overrides,
        'scheduled_call_id': scheduled_call_id
    }
    tags = [
        resource_tag(RESOURCE_REPOSITORY_TYPE, repo_id),
        action_tag('publish')
    ]
    return publish.apply_async_with_reservation(RESOURCE_REPOSITORY_TYPE,
                                                repo_id,
                                                tags=tags,
                                                kwargs=kwargs)
Example #25
def queue_update(distributor, config, delta):
    """
    Dispatch a task to update a distributor.

    :param distributor: distributor to be updated
    :type  distributor: pulp.server.db.model.Distributor
    :param config: A configuration dictionary for a distributor instance. The contents of this dict
                   depend on the type of distributor. Values of None will remove the key from the
                   config. Keys omitted from this dictionary will remain unchanged.
    :type  config: dict
    :param delta: A dictionary used to change conf values for a distributor instance. This currently
                  only supports the 'auto_publish' keyword, which should have a value of type bool
    :type  delta: dict or None

    :return: An AsyncResult instance as returned by Celery's apply_async
    :rtype:  celery.result.AsyncResult
    """
    task_tags = [
        tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, distributor.repo_id),
        tags.resource_tag(tags.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor.distributor_id),
        tags.action_tag('update_distributor')
    ]
    async_result = update.apply_async_with_reservation(
        tags.RESOURCE_REPOSITORY_TYPE, distributor.repo_id,
        [distributor.repo_id, distributor.distributor_id, config, delta],
        tags=task_tags)
    return async_result
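
A hedged invocation sketch: `distributor` stands for a pulp.server.db.model.Distributor instance (the function reads its repo_id and distributor_id above), and the config keys are hypothetical; per the docstring, a value of None removes that key while omitted keys stay unchanged.

# Hypothetical config keys; only 'auto_publish' is documented for `delta` above.
async_result = queue_update(
    distributor,
    config={'relative_url': 'zoo/updated', 'serve_http': None},  # None removes the key
    delta={'auto_publish': True})
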
Example #26
def consumer_content_update_itinerary(consumer_id, units, options):
    """
    Create an itinerary for consumer content update.
    @param consumer_id: unique id of the consumer
    @type consumer_id: str
    @param units: units to update
    @type units: list or tuple
    @param options: options to pass to the update manager
    @type options: dict or None
    @return: list of call requests
    @rtype: list
    """
    manager = managers_factory.consumer_agent_manager()
    args = [consumer_id]
    kwargs = {'units': units, 'options': options}
    weight = pulp_config.config.getint('tasks', 'consumer_content_weight')
    tags = [
        resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
        action_tag('unit_update')
    ]
    call_request = CallRequest(manager.update_content,
                               args,
                               kwargs,
                               weight=weight,
                               tags=tags,
                               archive=True,
                               asynchronous=True)
    call_request.add_control_hook(dispatch_constants.CALL_CANCEL_CONTROL_HOOK,
                                  cancel_agent_request)
    call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE,
                                consumer_id)
    return [call_request]
Example #27
    def test_run_no_status(self, mock_search, mock_poll):
        """
        Test the run() method when there are no current publish Tasks to attach to. It
        should query the server and inform the user that there are no publish operations to
        report.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id}
        # Simulate there being no publish tasks
        mock_search.return_value = []

        self.command.run(**data)

        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_PUBLISH_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        self.assertEqual(0, mock_poll.call_count)
        self.assertEqual(self.prompt.get_write_tags(), [TAG_TITLE, 'no-tasks'])
Example #28
    def POST(self, repo_id):

        # Params (validation will occur in the manager)
        params = self.params()
        importer_type = params.get('importer_type_id', None)
        importer_config = params.get('importer_config', None)

        if importer_type is None:
            _LOG.exception('Missing importer type adding importer to repository [%s]' % repo_id)
            raise exceptions.MissingValue(['importer_type'])

        # Note: If an importer exists, it's removed, so no need to handle 409s.
        # Note: If the plugin raises an exception during initialization, let it
        #  bubble up and be handled like any other 500.

        importer_manager = manager_factory.repo_importer_manager()
        resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                action_tag('add_importer')]

        call_request = CallRequest(importer_manager.set_importer,
                                   [repo_id, importer_type, importer_config],
                                   resources=resources,
                                   weight=weight,
                                   tags=tags)
        return execution.execute_sync_created(self, call_request, 'importer')
Example #29
File: users.py Project: sahwar/pulp
    def POST(self):

        # Pull all the user data
        user_data = self.params()
        login = user_data.get('login', None)
        password = user_data.get('password', None)
        name = user_data.get('name', None)

        # Creation
        manager = managers.user_manager()
        args = [login]
        kwargs = {'password': password, 'name': name}
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [
            resource_tag(dispatch_constants.RESOURCE_USER_TYPE, login),
            action_tag('create')
        ]
        call_request = CallRequest(manager.create_user,
                                   args,
                                   kwargs,
                                   weight=weight,
                                   tags=tags,
                                   kwarg_blacklist=['password'])
        call_request.creates_resource(dispatch_constants.RESOURCE_USER_TYPE,
                                      login)
        user = execution.execute_sync(call_request)
        user_link = serialization.link.child_link_obj(login)
        user.update(user_link)

        # Grant permissions
        permission_manager = managers.permission_manager()
        permission_manager.grant_automatic_permissions_for_resource(
            user_link['_href'])

        return self.created(login, user)
Example #30
    def POST(self, repo_id, importer_id):
        importer_manager = manager_factory.repo_importer_manager()
        importer = importer_manager.get_importer(repo_id)
        if importer_id != importer['id']:
            raise exceptions.MissingResource(importer=importer_id)

        schedule_options = self.params()
        sync_options = {'override_config': schedule_options.pop('override_config', {})}

        schedule_manager = manager_factory.schedule_manager()
        resources = {
            dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
                repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
            dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE: {
                importer_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_IMPORTER_TYPE, importer_id),
                action_tag('create_sync_schedule')]
        call_request = CallRequest(schedule_manager.create_sync_schedule,
                                   [repo_id, importer_id, sync_options, schedule_options],
                                   resources=resources,
                                   weight=weight,
                                   tags=tags,
                                   archive=True)
        schedule_id = execution.execute_sync(call_request)

        scheduler = dispatch_factory.scheduler()
        schedule = scheduler.get(schedule_id)
        obj = serialization.dispatch.scheduled_sync_obj(schedule)
        obj.update(serialization.link.child_link_obj(schedule_id))
        return self.created(obj['_href'], obj)
Example #31
    def POST(self):

        # Params
        params = self.params()
        role_id = params.get('role_id', None)
        resource = params.get('resource', None)
        operation_names = params.get('operations', None)

        _check_invalid_params({
            'role_id': role_id,
            'resource': resource,
            'operation_names': operation_names
        })

        operations = _get_operations(operation_names)

        # Grant permission synchronously
        role_manager = managers.role_manager()

        tags = [
            resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
            action_tag('remove_permission_from_role')
        ]

        call_request = CallRequest(role_manager.remove_permissions_from_role,
                                   [role_id, resource, operations],
                                   tags=tags)
        call_request.updates_resource(dispatch_constants.RESOURCE_ROLE_TYPE,
                                      role_id)

        return self.ok(execution.execute_sync(call_request))
Example #32
    def POST(self, repo_id):

        # Params (validation will occur in the manager)
        params = self.params()
        distributor_type = params.get('distributor_type_id', None)
        distributor_config = params.get('distributor_config', None)
        distributor_id = params.get('distributor_id', None)
        auto_publish = params.get('auto_publish', False)

        # Update the repo
        distributor_manager = manager_factory.repo_distributor_manager()

        resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                action_tag('add_distributor')]
        if distributor_id is not None:
            resources.update({dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
                distributor_id: dispatch_constants.RESOURCE_CREATE_OPERATION}})
            tags.append(resource_tag(
                dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id))
        call_request = CallRequest(distributor_manager.add_distributor,
                                   [repo_id, distributor_type, distributor_config, auto_publish, distributor_id],
                                   resources=resources,
                                   weight=weight,
                                   tags=tags)
        return execution.execute_created(self, call_request, distributor_id)
Example #33
def _get_publish_tasks(resource_id, context):
    """
    Get the list of currently running publish tasks for the given repo_group id.

    :param resource_id:     The id of the resource to retrieve the task id for. This should be a
                            repo or group id
    :type  resource_id:     str
    :param context:         The client context is used when fetching existing task ids
    :type  context:         pulp.client.extensions.core.ClientContext

    :return: The list of matching publish Tasks; an empty list if none are running
    :rtype:  list of pulp.bindings.responses.Task
    """
    tags = [
        tag_utils.resource_tag(tag_utils.RESOURCE_REPOSITORY_GROUP_TYPE,
                               resource_id),
        tag_utils.action_tag(tag_utils.ACTION_PUBLISH_TYPE)
    ]
    criteria = {
        'filters': {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': tags
            }
        }
    }
    return context.server.tasks_search.search(**criteria)
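
A short usage sketch (the group id is hypothetical): client commands like the ones tested in Examples #12 and #27 use this kind of lookup to decide whether to poll an existing publish or report that nothing is in progress.

running_publishes = _get_publish_tasks('my-repo-group', context)
if running_publishes:
    pass  # attach to / poll the existing tasks instead of starting a new publish
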
Example #34
    def POST(self, repo_id, distributor_id):
        distributor_manager = manager_factory.repo_distributor_manager()
        distributor_manager.get_distributor(repo_id, distributor_id)

        schedule_options = self.params()
        publish_options = {'override_config': schedule_options.pop('override_config', {})}

        schedule_manager = manager_factory.schedule_manager()
        resources = {
            dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
                repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
                distributor_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
                action_tag('create_publish_schedule')]
        call_request = CallRequest(schedule_manager.create_publish_schedule,
                                   [repo_id, distributor_id, publish_options, schedule_options],
                                   resources=resources,
                                   weight=weight,
                                   tags=tags,
                                   archive=True)
        schedule_id = execution.execute_sync(call_request)

        scheduler = dispatch_factory.scheduler()
        schedule = scheduler.get(schedule_id)
        obj = serialization.dispatch.scheduled_publish_obj(schedule)
        obj.update(serialization.link.child_link_obj(schedule_id))
        return self.created(obj['_href'], obj)
Example #35
    def post(self, request, consumer_id):
        """
        Creates an async task to regenerate content applicability data for the given consumer.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_id: The consumer ID.
        :type consumer_id: str

        :raises MissingResource: if some parameters are missing
        :raises OperationPostponed: when an async operation is performed.
        """

        consumer_query_manager = factory.consumer_query_manager()
        if consumer_query_manager.find_by_id(consumer_id) is None:
            raise MissingResource(consumer_id=consumer_id)
        consumer_criteria = Criteria(filters={'consumer_id': consumer_id})

        task_tags = [
            tags.action_tag('consumer_content_applicability_regeneration')
        ]
        async_result = regenerate_applicability_for_consumers.apply_async_with_reservation(
            tags.RESOURCE_CONSUMER_TYPE,
            consumer_id, (consumer_criteria.as_dict(), ),
            tags=task_tags)
        raise OperationPostponed(async_result)
Example #36
    def PUT(self, repo_id, distributor_id, schedule_id):
        distributor_manager = manager_factory.repo_distributor_manager()
        schedule_list = distributor_manager.list_publish_schedules(repo_id, distributor_id)
        if schedule_id not in schedule_list:
            raise exceptions.MissingResource(repo=repo_id, distributor=distributor_id, publish_schedule=schedule_id)

        publish_update = {}
        schedule_update = self.params()
        if 'override_config' in schedule_update:
            publish_update['override_config'] = schedule_update.pop('override_config')

        schedule_manager = manager_factory.schedule_manager()
        resources = {
            dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
                repo_id: dispatch_constants.RESOURCE_READ_OPERATION},
            dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE: {
                distributor_id: dispatch_constants.RESOURCE_READ_OPERATION},
            dispatch_constants.RESOURCE_SCHEDULE_TYPE: {
                schedule_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
                resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
                action_tag('update_publish_schedule')]
        call_request = CallRequest(schedule_manager.update_publish_schedule,
                                   [repo_id, distributor_id, schedule_id, publish_update, schedule_update],
                                   resources=resources,
                                   tags=tags,
                                   archive=True)
        execution.execute(call_request)

        scheduler = dispatch_factory.scheduler()
        schedule = scheduler.get(schedule_id)
        obj = serialization.dispatch.scheduled_publish_obj(schedule)
        obj.update(serialization.link.current_link_obj())
        return self.ok(obj)
Example #37
    def POST(self, consumer_id):
        consumer_manager = managers.consumer_manager()
        consumer_manager.get_consumer(consumer_id)

        schedule_data = self.params()
        units = schedule_data.pop('units', None)
        uninstall_options = {'options': schedule_data.pop('options', {})}

        if not units:
            raise MissingValue(['units'])

        schedule_manager = managers.schedule_manager()

        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                action_tag('create_unit_uninstall_schedule')]

        call_request = CallRequest(schedule_manager.create_unit_uninstall_schedule,
                                   [consumer_id, units, uninstall_options, schedule_data],
                                   weight=weight,
                                   tags=tags,
                                   archive=True)
        call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)

        schedule_id = execution.execute_sync(call_request)

        scheduler = dispatch_factory.scheduler()
        scheduled_call = scheduler.get(schedule_id)

        scheduled_obj = serialization.dispatch.scheduled_unit_management_obj(scheduled_call)
        scheduled_obj.update(serialization.link.child_link_obj(schedule_id))
        return self.created(scheduled_obj['_href'], scheduled_obj)
Example #38
    def POST(self, dest_repo_id):

        # Params
        params = self.params()
        source_repo_id = params.get('source_repo_id', None)
        overrides = params.get('override_config', None)

        if source_repo_id is None:
            raise exceptions.MissingValue(['source_repo_id'])

        criteria = params.get('criteria', None)
        if criteria is not None:
            try:
                criteria = UnitAssociationCriteria.from_client_input(criteria)
            except:
                _LOG.exception('Error parsing association criteria [%s]' % criteria)
                raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        association_manager = manager_factory.repo_unit_association_manager()
        resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE: {
            source_repo_id: dispatch_constants.RESOURCE_READ_OPERATION,
            dest_repo_id: dispatch_constants.RESOURCE_UPDATE_OPERATION}}
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, dest_repo_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, source_repo_id),
                action_tag('associate')]
        call_request = CallRequest(association_manager.associate_from_repo,
                                   [source_repo_id, dest_repo_id],
                                   {'criteria': criteria, 'import_config_override': overrides},
                                   resources=resources,
                                   tags=tags,
                                   archive=True)
        return execution.execute_async(self, call_request)
Example #39
    def PUT(self, consumer_id, schedule_id):
        consumer_manager = managers.consumer_manager()
        consumer_manager.get_consumer(consumer_id)

        schedule_data = self.params()
        install_options = None
        units = schedule_data.pop('units', None)

        if 'options' in schedule_data:
            install_options = {'options': schedule_data.pop('options')}

        schedule_manager = managers.schedule_manager()

        tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
                resource_tag(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id),
                action_tag('update_unit_uninstall_schedule')]

        call_request = CallRequest(schedule_manager.update_unit_uninstall_schedule,
                                   [consumer_id, schedule_id, units, install_options, schedule_data],
                                   tags=tags,
                                   archive=True)
        call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
        call_request.updates_resource(dispatch_constants.RESOURCE_SCHEDULE_TYPE, schedule_id)

        execution.execute(call_request)

        scheduler = dispatch_factory.scheduler()
        scheduled_call = scheduler.get(schedule_id)

        scheduled_obj = serialization.dispatch.scheduled_unit_management_obj(scheduled_call)
        scheduled_obj.update(serialization.link.current_link_obj())
        return self.ok(scheduled_obj)
Example #40
    def test_search(self, mock_search):
        """
        Test the search method. All it really does is call the superclass search() method, and turn
        the results into Tasks.
        """
        connection = mock.MagicMock()
        repo_id = 'some_repo'
        repo_tag = tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id)
        sync_tag = tags.action_tag(tags.ACTION_SYNC_TYPE)
        search_criteria = {'filters': {'state': {'$nin': responses.COMPLETED_STATES},
                                       'tags': {'$all': [repo_tag, sync_tag]}}}
        response_body = [{u'task_id': u'3fff3e01-ba48-414c-a4bb-daaed7a0d2d8',
                          u'tags': [u'pulp:repository:%s' % repo_id, u'pulp:action:sync'],
                          u'start_time': 1393098484,
                          u'queue': u'*****@*****.**',
                          u'state': u'running', u'id': {u'$oid': u'5308fef46b565fd6740199ae'}}]
        mock_search.return_value = response_body

        results = tasks.TaskSearchAPI(connection).search(**search_criteria)

        mock_search.assert_called_once_with(**search_criteria)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 1)
        task = results[0]
        self.assertEqual(type(task), responses.Task)
        self.assertEqual(task.task_id, response_body[0]['task_id'])
        self.assertEqual(task.tags, response_body[0]['tags'])
        self.assertEqual(task.start_time, response_body[0]['start_time'])
        self.assertEqual(task.state, response_body[0]['state'])
Example #41
    def test_run(self, mock_sync, mock_search, poll):
        """
        Test the run() method when there is not an existing sync Task on the server.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id, polling.FLAG_BACKGROUND.keyword: False,
                sp.FLAG_FORCE_FULL_SYNC.keyword: False}
        # No tasks are running
        mock_search.return_value = []
        # responses.Response from the sync call
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task = responses.Task(task_data)
        mock_sync.return_value = responses.Response(202, task)

        self.command.run(**data)

        mock_sync.assert_called_once_with(repo_id, None)
        sync_tasks = poll.mock_calls[0][1][0]
        poll.assert_called_once_with(sync_tasks, data)
        expected_search_query = {
            'state': {'$nin': responses.COMPLETED_STATES},
            'tags': {'$all': [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                              tags.action_tag(tags.ACTION_SYNC_TYPE)]}}
        mock_search.assert_called_once_with(filters=expected_search_query)
        self.assertEqual(self.prompt.get_write_tags(), [TAG_TITLE])
Example #42
    def test_run_already_in_progress(self, mock_sync, mock_search, poll):
        """
        Test the run() method when there is an existing sync Task on the server.
        """
        repo_id = 'test-repo'
        data = {
            options.OPTION_REPO_ID.keyword: repo_id,
            polling.FLAG_BACKGROUND.keyword: False
        }
        # Simulate a task already running
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task_data['state'] = 'running'
        task = responses.Task(task_data)
        mock_search.return_value = [task]

        self.command.run(**data)

        self.assertEqual(mock_sync.call_count, 0)
        sync_tasks = poll.mock_calls[0][1][0]
        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_SYNC_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        poll.assert_called_once_with(sync_tasks, data)
        write_tags = self.prompt.get_write_tags()
        self.assertEqual(2, len(write_tags))
        self.assertEqual(write_tags[1], 'in-progress')
Example #43
0
    def test_run_already_in_progress(self, mock_publish, mock_search, mock_poll):
        """
        Test the run() method when there is already an incomplete publish operation.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id, polling.FLAG_BACKGROUND.keyword: False,
                sp.FLAG_FORCE_FULL_PUBLISH.keyword: False}
        # Simulate a task already running
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task_data['state'] = 'running'
        task = responses.Task(task_data)
        mock_search.return_value = [task]

        self.command.run(**data)

        # Publish shouldn't get called again since it's already running
        self.assertEqual(mock_publish.call_count, 0)
        expected_search_query = {
            'state': {'$nin': responses.COMPLETED_STATES},
            'tags': {'$all': [tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                              tags.action_tag(tags.ACTION_PUBLISH_TYPE)]}}
        mock_search.assert_called_once_with(filters=expected_search_query)
        mock_poll.assert_called_once_with([task], data)
        write_tags = self.prompt.get_write_tags()
        self.assertEqual(2, len(write_tags))
        self.assertEqual(write_tags[1], 'in-progress')
Example #44
0
    def test_run_no_status(self, mock_search, mock_poll):
        """
        Test run() when there are no sync_tasks on the server.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id}
        # No tasks are running
        mock_search.return_value = []

        self.command.run(**data)

        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_SYNC_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        self.assertEqual(0, mock_poll.call_count)
        self.assertEqual(self.prompt.get_write_tags(), [TAG_TITLE, 'no-tasks'])
Example #45
0
    def POST(self, repo_id):
        # Params
        params = self.params()
        query = params.get('criteria', {})
        options = params.get('options', {})
        timeout = params.get('timeout', 60)

        try:
            criteria = UnitAssociationCriteria.from_client_input(query)
        except:
            _LOG.exception('Error parsing association criteria [%s]' % query)
            raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        try:
            timeout = int(timeout)
        except ValueError:
            raise exceptions.InvalidValue(['timeout']), None, sys.exc_info()[2]

        # Coordinator configuration
        resources = {dispatch_constants.RESOURCE_REPOSITORY_TYPE: {repo_id: dispatch_constants.RESOURCE_READ_OPERATION}}
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
                action_tag('resolve_dependencies')]

        dependency_manager = manager_factory.dependency_manager()
        call_request = CallRequest(dependency_manager.resolve_dependencies_by_criteria,
                                   [repo_id, criteria, options],
                                   resources=resources, tags=tags, archive=True)

        return execution.execute_sync_ok(self, call_request, timeout=timedelta(seconds=timeout))
Example #46
0
    def post(self, request, repo_group_id):
        """
        Dispatch a task to publish content from the repo group using the distributor specified by
        the params.

        :param request: WSGI request object
        :type  request: django.core.handlers.wsgi.WSGIRequest
        :param repo_group_id: repo group to publish
        :type  repo_group_id: str

        :raises pulp_exceptions.MissingValue: if 'id' is not passed in the body
        :raises pulp_exceptions.OperationPostponed: when the publish task is dispatched
        """
        params = request.body_as_json
        distributor_id = params.get('id', None)
        overrides = params.get('override_config', None)
        if distributor_id is None:
            raise pulp_exceptions.MissingValue(['id'])
        # If a repo group does not exist, get_group raises a MissingResource exception
        manager = managers_factory.repo_group_query_manager()
        manager.get_group(repo_group_id)
        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_GROUP_TYPE,
                              repo_group_id),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE,
                              distributor_id),
            tags.action_tag('publish')
        ]
        async_result = repo_group_publish.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_GROUP_TYPE,
            repo_group_id,
            args=[repo_group_id, distributor_id],
            kwargs={'publish_config_override': overrides},
            tags=task_tags)
        raise pulp_exceptions.OperationPostponed(async_result)
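
Based only on the two params read above, the JSON body for this publish endpoint needs an 'id' and may carry an 'override_config'; the values below are invented placeholders, not a documented example:

# Hypothetical request body for the repo group publish view above; the
# distributor id and the override_config contents are made-up placeholders.
publish_body = {
    'id': 'demo_group_distributor',
    'override_config': {'checksum_type': 'sha256'},
}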
Example #47
0
    def test_run(self, mock_sync, mock_search, poll):
        """
        Test the run() method when there is not an existing sync Task on the server.
        """
        repo_id = 'test-repo'
        data = {
            options.OPTION_REPO_ID.keyword: repo_id,
            polling.FLAG_BACKGROUND.keyword: False
        }
        # No tasks are running
        mock_search.return_value = []
        # responses.Response from the sync call
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task = responses.Task(task_data)
        mock_sync.return_value = responses.Response(202, task)

        self.command.run(**data)

        mock_sync.assert_called_once_with(repo_id, None)
        sync_tasks = poll.mock_calls[0][1][0]
        poll.assert_called_once_with(sync_tasks, data)
        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_SYNC_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        self.assertEqual(self.prompt.get_write_tags(), [TAG_TITLE])
Example #48
0
    def POST(self, repo_group_id):
        # Params (validation will occur in the manager)
        params = self.params()
        distributor_type_id = params.get('distributor_type_id', None)
        distributor_config = params.get('distributor_config', None)
        distributor_id = params.get('distributor_id', None)

        distributor_manager = managers_factory.repo_group_distributor_manager()

        resources = {dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE : {
            repo_group_id : dispatch_constants.RESOURCE_UPDATE_OPERATION
        }}
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                action_tag('add_distributor')]
        if distributor_id is not None:
            tags.append(resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id))

        call_request = CallRequest(distributor_manager.add_distributor,
                                   [repo_group_id, distributor_type_id, distributor_config, distributor_id],
                                   resources=resources,
                                   weight=weight,
                                   tags=tags)
        created = execution.execute(call_request)

        href = serialization.link.child_link_obj(created['id'])
        created.update(href)

        return self.created(href['_href'], created)
Example #49
0
    def test_run(self, mock_search, poll):
        """
        Test the run() method when the server has one incomplete sync task.
        """
        repo_id = 'test-repo'
        data = {options.OPTION_REPO_ID.keyword: repo_id}
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task_data['state'] = 'running'
        task = responses.Task(task_data)
        mock_search.return_value = [task]

        self.command.run(**data)

        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_SYNC_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        sync_tasks = poll.mock_calls[0][1][0]
        poll.assert_called_once_with(sync_tasks, data)
Example #50
0
    def DELETE(self, repo_group_id, distributor_id):
        params = self.params()
        force = params.get('force', False)

        distributor_manager = managers_factory.repo_group_distributor_manager()

        resources = {
            dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE :
                    {repo_group_id : dispatch_constants.RESOURCE_UPDATE_OPERATION},
            dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE :
                    {distributor_id : dispatch_constants.RESOURCE_DELETE_OPERATION},
                     }
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id),
                action_tag('remove_distributor')
               ]
        call_request = CallRequest(distributor_manager.remove_distributor,
                                   args=[repo_group_id, distributor_id],
                                   kwargs={'force' : force},
                                   resources=resources,
                                   tags=tags,
                                   archive=True)

        execution.execute(call_request)
        return self.ok(None)
Example #51
0
    def test_run_background(self, mock_publish, mock_search, mock_poll):
        """
        Test run() when the --bg flag is set.
        """
        repo_id = 'test-repo'
        data = {
            options.OPTION_REPO_ID.keyword: repo_id,
            polling.FLAG_BACKGROUND.keyword: False
        }
        # No tasks are running
        mock_search.return_value = []
        # responses.Response from the publish call
        task_data = copy.copy(CALL_REPORT_TEMPLATE)
        task = responses.Task(task_data)
        mock_publish.return_value = responses.Response(202, task)

        self.command.run(**data)

        mock_publish.assert_called_once_with(repo_id,
                                             self.command.distributor_id, None)
        expected_search_query = {
            'state': {
                '$nin': responses.COMPLETED_STATES
            },
            'tags': {
                '$all': [
                    tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
                    tags.action_tag(tags.ACTION_PUBLISH_TYPE)
                ]
            }
        }
        mock_search.assert_called_once_with(filters=expected_search_query)
        mock_poll.assert_called_once_with([task], data)
Example #52
0
    def PUT(self, repo_group_id, distributor_id):
        params = self.params()

        distributor_config = params.get('distributor_config', None)

        if distributor_config is None:
            raise pulp_exceptions.MissingValue(['distributor_config'])

        distributor_manager = managers_factory.repo_group_distributor_manager()

        resources = {
            dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE :
                    {repo_group_id : dispatch_constants.RESOURCE_UPDATE_OPERATION},
            dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE :
                    {distributor_id : dispatch_constants.RESOURCE_UPDATE_OPERATION},
            }
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id),
                action_tag('update_distributor')
        ]

        call_request = CallRequest(distributor_manager.update_distributor_config,
                                   args=[repo_group_id, distributor_id, distributor_config],
                                   resources=resources,
                                   tags=tags,
                                   archive=True)

        result = execution.execute(call_request)

        href = serialization.link.current_link_obj()
        result.update(href)

        return self.ok(result)
Example #53
0
    def test_sync_action(self):
        """
        Test with action set to 'sync'.
        """
        context = mock.MagicMock()
        a_task = mock.MagicMock()
        context.server.tasks_search.search.return_value = [a_task]
        repo_id = 'some_repo'
        action = 'sync'

        tasks = sp._get_repo_tasks(context, repo_id, action)

        self.assertEqual(tasks, [a_task])
        expected_repo_tag = tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE,
                                              repo_id)
        expected_action_tag = tags.action_tag(tags.ACTION_SYNC_TYPE)
        expected_search_criteria = {
            'filters': {
                'state': {
                    '$nin': responses.COMPLETED_STATES
                },
                'tags': {
                    '$all': [expected_repo_tag, expected_action_tag]
                }
            }
        }
        context.server.tasks_search.search.assert_called_once_with(
            **expected_search_criteria)
Example #54
0
    def POST(self, repo_group_id):
        params = self.params()
        distributor_id = params.get('id', None)
        overrides = params.get('override_config', None)

        if distributor_id is None:
            raise MissingValue(['id'])

        publish_manager = managers_factory.repo_group_publish_manager()

        resources = {
            dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE :
                    {repo_group_id : dispatch_constants.RESOURCE_UPDATE_OPERATION},
            dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE :
                    {distributor_id : dispatch_constants.RESOURCE_UPDATE_OPERATION},
            }
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_TYPE, repo_group_id),
                resource_tag(dispatch_constants.RESOURCE_REPOSITORY_GROUP_DISTRIBUTOR_TYPE, distributor_id),
                action_tag('publish')
        ]
        weight = pulp_config.config.getint('tasks', 'publish_weight')

        call_request = CallRequest(publish_manager.publish,
                                   args=[repo_group_id, distributor_id],
                                   kwargs={'publish_config_override' : overrides},
                                   resources=resources,
                                   tags=tags,
                                   weight=weight,
                                   archive=True)

        return execution.execute_async(self, call_request)
Example #55
0
def queue_download_deferred():
    """
    Queue a task to download all content units with entries in the DeferredDownload
    collection.
    """
    tags = [pulp_tags.action_tag(pulp_tags.ACTION_DEFERRED_DOWNLOADS_TYPE)]
    return download_deferred.apply_async(tags=tags)
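
The tags keyword passed to apply_async() here is not a stock Celery option; the call relies on Pulp's own task machinery, not Celery itself, to accept it and keep the tags with the task's status. A rough, dependency-free sketch of that idea (the class and attribute names are invented, and this is not Pulp's implementation):

# Invented illustration of a task wrapper that accepts a ``tags`` keyword the way the
# call above assumes; Pulp actually handles this in its own Celery Task subclass.
class TaggedTask(object):
    def __init__(self, func):
        self.func = func
        self.dispatched = []  # records (args, kwargs, tags) for each queued call

    def apply_async(self, args=None, kwargs=None, tags=None, **options):
        # Keep the tags with the queue entry instead of handing them to the worker.
        entry = (tuple(args or ()), dict(kwargs or {}), list(tags or []))
        self.dispatched.append(entry)
        return entry


download_deferred_sketch = TaggedTask(lambda: 'download all deferred units')
download_deferred_sketch.apply_async(tags=['pulp:action:deferred_download'])  # tag string is illustrative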
Example #56
0
    def POST(self):

        # Pull all the roles data
        role_data = self.params()
        role_id = role_data.get('role_id', None)
        display_name = role_data.get('display_name', None)
        description = role_data.get('description', None)

        # Creation
        manager = managers.role_manager()
        resources = {
            dispatch_constants.RESOURCE_ROLE_TYPE: {
                role_id: dispatch_constants.RESOURCE_CREATE_OPERATION
            }
        }
        args = [role_id, display_name, description]
        weight = pulp_config.config.getint('tasks', 'create_weight')
        tags = [
            resource_tag(dispatch_constants.RESOURCE_ROLE_TYPE, role_id),
            action_tag('create')
        ]
        call_request = CallRequest(manager.create_role,
                                   args,
                                   resources=resources,
                                   weight=weight,
                                   tags=tags)

        role = execution.execute_sync(call_request)
        role_link = serialization.link.child_link_obj(role_id)
        role.update(role_link)

        return self.created(role_id, role)
Example #57
0
    def POST(self):
        orphans = self.params()
        orphan_manager = factory.content_orphan_manager()
        tags = [action_tag('delete_orphans'),
                resource_tag(dispatch_constants.RESOURCE_CONTENT_UNIT_TYPE, 'orphans')]
        call_request = CallRequest(orphan_manager.delete_orphans_by_id, [orphans], tags=tags,
                                   archive=True)
        return execution.execute_async(self, call_request)
Example #58
0
    def post(self, request):
        """
        Creates an async task to regenerate content applicability data for given consumers.

        body {consumer_criteria:<dict>}

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest

        :raises MissingValue: if some parameters are missing
        :raises InvalidValue: if some parameters are invalid
        :raises OperationPostponed: when an async operation is performed.
        """

        body = request.body_as_json
        consumer_criteria = body.get('consumer_criteria', None)
        if consumer_criteria is None:
            raise MissingValue('consumer_criteria')
        try:
            consumer_criteria = Criteria.from_client_input(consumer_criteria)
        except:
            raise InvalidValue('consumer_criteria')

        task_tags = [tags.action_tag('content_applicability_regeneration')]
        async_result = regenerate_applicability_for_consumers.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_PROFILE_APPLICABILITY_TYPE, tags.RESOURCE_ANY_ID,
            (consumer_criteria.as_dict(),), tags=task_tags)
        raise OperationPostponed(async_result)
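
The body this view reads carries a criteria document under 'consumer_criteria', which Criteria.from_client_input() parses; the example below only illustrates that general shape (a 'filters' mapping), with placeholder consumer ids:

# Illustrative request body for the applicability regeneration view above;
# the filter values are placeholders.
regeneration_body = {
    'consumer_criteria': {
        'filters': {'id': {'$in': ['consumer-1', 'consumer-2']}},
    },
}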
Example #59
0
    def test_bind(self, *mocks):

        mock_agent = mocks[0]
        mock_context = mocks[1]
        mock_factory = mocks[2]
        mock_bindings = mocks[3]
        mock_task_status = mocks[4]
        mock_uuid = mocks[5]

        consumer = {'id': '1234'}
        mock_consumer_manager = Mock()
        mock_consumer_manager.get_consumer = Mock(return_value=consumer)
        mock_factory.consumer_manager = Mock(return_value=mock_consumer_manager)

        binding = {}
        mock_bind_manager = Mock()
        mock_bind_manager.get_bind = Mock(return_value=binding)
        mock_bind_manager.action_pending = Mock()
        mock_factory.consumer_bind_manager = Mock(return_value=mock_bind_manager)

        agent_bindings = []
        mock_bindings.return_value = agent_bindings

        task_id = '2345'
        mock_context.return_value = {}
        mock_uuid.return_value = task_id

        # test manager

        repo_id = '100'
        distributor_id = '200'
        options = {}
        agent_manager = AgentManager()
        agent_manager.bind(consumer['id'], repo_id, distributor_id, options)

        # validations

        task_tags = [
            tags.resource_tag(tags.RESOURCE_CONSUMER_TYPE, consumer['id']),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_DISTRIBUTOR_TYPE, distributor_id),
            tags.action_tag(tags.ACTION_AGENT_BIND)
        ]

        mock_consumer_manager.get_consumer.assert_called_with(consumer['id'])
        mock_bind_manager.get_bind.assert_called_with(consumer['id'], repo_id, distributor_id)
        mock_bindings.assert_called_with([binding])

        mock_context.assert_called_with(
            consumer,
            task_id=task_id,
            action='bind',
            consumer_id=consumer['id'],
            repo_id=repo_id,
            distributor_id=distributor_id)

        mock_task_status.assert_called_with(task_id=task_id, worker_name='agent', tags=task_tags)
        mock_agent.bind.assert_called_with(mock_context.return_value, agent_bindings, options)
        mock_bind_manager.action_pending.assert_called_with(
            consumer['id'], repo_id, distributor_id, Bind.Action.BIND, task_id)
Example #60
0
    def post(self, request, consumer_id):
        """
        Creates an async task to regenerate content applicability data for given consumer.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_id: The consumer ID.
        :type consumer_id: str

        :raises MissingResource: if some parameters are missing
        :raises OperationPostponed: when an async operation is performed.
        """

        consumer_query_manager = factory.consumer_query_manager()
        if consumer_query_manager.find_by_id(consumer_id) is None:
            raise MissingResource(consumer_id=consumer_id)
        consumer_criteria = Criteria(filters={'consumer_id': consumer_id})

        task_tags = [tags.action_tag('consumer_content_applicability_regeneration')]
        async_result = regenerate_applicability_for_consumers.apply_async_with_reservation(
            tags.RESOURCE_CONSUMER_TYPE,
            consumer_id,
            (consumer_criteria.as_dict(),),
            tags=task_tags)
        raise OperationPostponed(async_result)