Example #1
File: bind.py Project: omps/pulp
    def _validate_consumer_repo(consumer_id, repo_id, distributor_id):
        """
        Validate that the given consumer, repository, and distributor are present.
        Rather than raising an exception, this method returns a dictionary of missing
        values and allows the caller to decide what exception to raise.

        :param consumer_id:     The consumer id to validate
        :type  consumer_id:     str
        :param repo_id:         The repository id to validate
        :type  repo_id:         str
        :param distributor_id:  The distributor_id to validate
        :type  distributor_id:  str

        :return: A dictionary containing the missing values, or an empty dict if everything is valid
        :rtype:  dict
        """
        missing_values = {}

        try:
            factory.consumer_manager().get_consumer(consumer_id)
        except MissingResource:
            missing_values['consumer_id'] = consumer_id
        try:
            factory.repo_query_manager().get_repository(repo_id)
        except MissingResource:
            missing_values['repo_id'] = repo_id
        try:
            factory.repo_distributor_manager().get_distributor(
                repo_id, distributor_id)
        except MissingResource:
            missing_values['distributor_id'] = distributor_id

        return missing_values
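A caller of _validate_consumer_repo typically turns a non-empty result into a MissingResource error. The sketch below is illustrative only; the import path and the wrapper function are assumptions rather than part of the examples on this page (Example #28 shows the same keyword-argument raise).

    # Hypothetical caller sketch; the exception import path is assumed from the
    # way other snippets on this page reference MissingResource.
    from pulp.server.exceptions import MissingResource

    def ensure_bind_resources_exist(consumer_id, repo_id, distributor_id):
        """Raise MissingResource if the consumer, repo, or distributor is absent."""
        missing_values = _validate_consumer_repo(consumer_id, repo_id, distributor_id)
        if missing_values:
            raise MissingResource(**missing_values)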
Example #2
 def DELETE(self, id):
     # validate
     manager_factory.repo_query_manager().get_repository(id)
     # delete
     call_requests = repo_delete_itinerary(id)
     _LOG.info('Itinerary: %s', [r.id for r in call_requests])
     execution.execute_multiple(call_requests)
Example #3
    def POST(self, consumer_id):
        """
        Create a bind association between the specified
        consumer by id included in the URL path and a repo-distributor
        specified in the POST body: {repo_id:<str>, distributor_id:<str>}.
        Designed to be idempotent so only MissingResource is expected to
        be raised by manager.
        @param consumer_id: The consumer to bind.
        @type consumer_id: str
        @return: The list of call_reports
        @rtype: list
        """
        # validate consumer
        consumer_manager = managers.consumer_manager()
        consumer_manager.get_consumer(consumer_id)

        # get other options and validate them
        body = self.params()
        repo_id = body.get('repo_id')
        distributor_id = body.get('distributor_id')
        binding_config = body.get('binding_config', None)
        options = body.get('options', {})
        notify_agent = body.get('notify_agent', True)

        managers.repo_query_manager().get_repository(repo_id)
        managers.repo_distributor_manager().get_distributor(
            repo_id, distributor_id)

        # bind
        call_requests = bind_itinerary(consumer_id, repo_id, distributor_id,
                                       notify_agent, binding_config, options)
        execution.execute_multiple(call_requests)
Example #4
 def DELETE(self, id):
     # validate
     manager_factory.repo_query_manager().get_repository(id)
     # delete
     call_requests = repo_delete_itinerary(id)
     _LOG.info('Itinerary: %s', [r.id for r in call_requests])
     execution.execute_multiple(call_requests)
Example #5
File: bind.py Project: beav/pulp
    def _validate_consumer_repo(consumer_id, repo_id, distributor_id):
        """
        Validate that the given consumer, repository, and distributor are present.
        Rather than raising an exception, this method returns a dictionary of missing
        values and allows the caller to decide what exception to raise.

        :param consumer_id:     The consumer id to validate
        :type  consumer_id:     str
        :param repo_id:         The repository id to validate
        :type  repo_id:         str
        :param distributor_id:  The distributor_id to validate
        :type  distributor_id:  str

        :return: A dictionary containing the missing values, or an empty dict if everything is valid
        :rtype:  dict
        """
        missing_values = {}

        try:
            factory.consumer_manager().get_consumer(consumer_id)
        except MissingResource:
            missing_values['consumer_id'] = consumer_id
        try:
            factory.repo_query_manager().get_repository(repo_id)
        except MissingResource:
            missing_values['repo_id'] = repo_id
        try:
            factory.repo_distributor_manager().get_distributor(repo_id, distributor_id)
        except MissingResource:
            missing_values['distributor_id'] = distributor_id

        return missing_values
Example #6
    def POST(self, consumer_id):
        """
        Create a bind association between the specified
        consumer by id included in the URL path and a repo-distributor
        specified in the POST body: {repo_id:<str>, distributor_id:<str>}.
        Designed to be idempotent so only MissingResource is expected to
        be raised by manager.
        @param consumer_id: The consumer to bind.
        @type consumer_id: str
        @return: The list of call_reports
        @rtype: list
        """
        # validate consumer
        consumer_manager = managers.consumer_manager()
        consumer_manager.get_consumer(consumer_id)

        # get other options and validate them
        body = self.params()
        repo_id = body.get('repo_id')
        distributor_id = body.get('distributor_id')
        binding_config = body.get('binding_config', None)
        options = body.get('options', {})
        notify_agent = body.get('notify_agent', True)

        managers.repo_query_manager().get_repository(repo_id)
        managers.repo_distributor_manager().get_distributor(repo_id, distributor_id)

        # bind
        call_requests = bind_itinerary(consumer_id, repo_id, distributor_id, notify_agent, binding_config, options)
        execution.execute_multiple(call_requests)
Example #7
    def delete(self, request, repo_id):
        """
        Dispatch a task to delete a repository.

        :param request: WSGI request object
        :type  request: django.core.handlers.wsgi.WSGIRequest
        :param repo_id: id of repository to be removed
        :type  repo_id: str

        :rtype : django.http.HttpResponse
        :raises pulp_exceptions.OperationPostponed: dispatch a task to delete the provided repo
        """

        # validate
        manager_factory.repo_query_manager().get_repository(repo_id)

        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.action_tag('delete')
        ]
        async_result = repo_tasks.delete.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE, repo_id,
            [repo_id], tags=task_tags)

        raise pulp_exceptions.OperationPostponed(async_result)
Example #8
    def DELETE(self, repo_id):
        # validate
        manager_factory.repo_query_manager().get_repository(repo_id)

        # delete
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag("delete")]
        async_result = repository.delete.apply_async_with_reservation(
            dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id], tags=tags
        )

        raise exceptions.OperationPostponed(async_result)
Example #9
    def DELETE(self, repo_id):
        # validate
        manager_factory.repo_query_manager().get_repository(repo_id)

        # delete
        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.action_tag('delete')
        ]
        async_result = repository.delete.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id], tags=task_tags)

        raise exceptions.OperationPostponed(async_result)
Example #10
    def POST(self, repo_id):

        # Params
        params = self.params()
        overrides = params.get('override_config', None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)

        # Execute the sync asynchronously
        async_result = repository.sync_with_auto_publish(repo_id, overrides)

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        raise exceptions.OperationPostponed(async_result)
Example #11
    def POST(self, repo_id):

        # Params
        params = self.params()
        overrides = params.get('override_config', None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)

        # Execute the sync asynchronously
        async_result = repository.sync_with_auto_publish(repo_id, overrides)

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        raise exceptions.OperationPostponed(async_result)
Example #12
    def get(self, request, repo_id):
        """
        Looks for query parameters 'importers' and 'distributors', and will add
        the corresponding fields to the repository returned. Query parameter
        'details' is equivalent to passing both 'importers' and 'distributors'.

        :param request: WSGI request object
        :type  request: django.core.handlers.wsgi.WSGIRequest
        :param repo_id: id of requested repository
        :type  repo_id: str

        :return: Response containing a serialized dict for the requested repo.
        :rtype : django.http.HttpResponse
        :raises pulp_exceptions.MissingResource: if repo cannot be found
        """

        query_manager = manager_factory.repo_query_manager()
        repo = query_manager.find_by_id(repo_id)

        if repo is None:
            raise pulp_exceptions.MissingResource(repo=repo_id)

        repo['_href'] = reverse('repo_resource', kwargs={'repo_id': repo_id})
        _convert_repo_dates_to_strings(repo)

        details = request.GET.get('details', 'false').lower() == 'true'
        if request.GET.get('importers', 'false').lower() == 'true' or details:
            repo = _merge_related_objects(
                'importers', manager_factory.repo_importer_manager(), (repo,))[0]
        if request.GET.get('distributors', 'false').lower() == 'true' or details:
            repo = _merge_related_objects(
                'distributors', manager_factory.repo_distributor_manager(), (repo,))[0]

        return generate_json_response_with_pulp_encoder(repo)
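The view above recognizes the 'details', 'importers', and 'distributors' query parameters. Below is a minimal sketch of exercising it with Django's test client, assuming a Pulp-2-era Django project where the route is registered under the name 'repo_resource' (as the reverse() call in the view implies); the repo id and everything else are invented for illustration.

    # Hedged sketch: drive the repository resource view via the Django test client.
    from django.core.urlresolvers import reverse  # django.urls.reverse on newer Django
    from django.test import Client

    client = Client()
    url = reverse('repo_resource', kwargs={'repo_id': 'zoo-repo'})
    # 'details=true' is equivalent to passing both 'importers' and 'distributors'
    response = client.get(url, {'details': 'true'})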
Example #13
    def regenerate_applicability_for_repos(repo_criteria):
        """
        Regenerate and save applicability data affected by given updated repositories.

        :param repo_criteria: The repo selection criteria
        :type repo_criteria: dict
        """
        repo_criteria = Criteria.from_dict(repo_criteria)
        repo_query_manager = managers.repo_query_manager()

        # Process repo criteria
        repo_criteria.fields = ['id']
        repo_ids = [r['id'] for r in repo_query_manager.find_by_criteria(repo_criteria)]

        for repo_id in repo_ids:
            # Find all existing applicabilities for given repo_id
            existing_applicabilities = RepoProfileApplicability.get_collection().find(
                {'repo_id': repo_id})
            for existing_applicability in existing_applicabilities:
                # Convert cursor to RepoProfileApplicability object
                existing_applicability = RepoProfileApplicability(**dict(existing_applicability))
                profile_hash = existing_applicability['profile_hash']
                unit_profile = UnitProfile.get_collection().find_one({'profile_hash': profile_hash},
                                                                     fields=['id', 'content_type'])
                if unit_profile is None:
                    # Unit profiles change whenever packages are installed or removed on consumers,
                    # and it is possible that existing_applicability references a UnitProfile
                    # that no longer exists. This is harmless, as Pulp has a monthly cleanup task
                    # that will identify these dangling references and remove them.
                    continue

                # Regenerate applicability data for given unit_profile and repo id
                ApplicabilityRegenerationManager.regenerate_applicability(
                    profile_hash, unit_profile['content_type'], unit_profile['id'], repo_id,
                    existing_applicability)
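Since the method rebuilds its argument with Criteria.from_dict, callers pass a plain dictionary. A minimal sketch of such a call is shown below; the Mongo-style filter syntax follows Pulp's Criteria conventions, while the repo ids and the class-level invocation are assumptions.

    # Illustrative only: regenerate applicability for two repositories selected by id.
    repo_criteria = {'filters': {'id': {'$in': ['zoo-repo', 'epel-7-x86_64']}}}
    ApplicabilityRegenerationManager.regenerate_applicability_for_repos(repo_criteria)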
Example #14
    def test_syntactic_sugar_methods(self):
        """
        Tests the syntactic sugar methods for retrieving specific managers.
        """
        # Setup
        factory.initialize()

        # Test
        self.assertTrue(isinstance(factory.authentication_manager(), AuthenticationManager))
        self.assertTrue(isinstance(factory.cert_generation_manager(), CertGenerationManager))
        self.assertTrue(isinstance(factory.certificate_manager(), CertificateManager))
        self.assertTrue(isinstance(factory.password_manager(), PasswordManager))
        self.assertTrue(isinstance(factory.permission_manager(), PermissionManager))
        self.assertTrue(isinstance(factory.permission_query_manager(), PermissionQueryManager))
        self.assertTrue(isinstance(factory.role_manager(), RoleManager))
        self.assertTrue(isinstance(factory.role_query_manager(), RoleQueryManager))
        self.assertTrue(isinstance(factory.user_manager(), UserManager))
        self.assertTrue(isinstance(factory.user_query_manager(), UserQueryManager))
        self.assertTrue(isinstance(factory.repo_manager(), RepoManager))
        self.assertTrue(isinstance(factory.repo_unit_association_manager(), RepoUnitAssociationManager))
        self.assertTrue(isinstance(factory.repo_publish_manager(), RepoPublishManager))
        self.assertTrue(isinstance(factory.repo_query_manager(), RepoQueryManager))
        self.assertTrue(isinstance(factory.repo_sync_manager(), RepoSyncManager))
        self.assertTrue(isinstance(factory.content_manager(), ContentManager))
        self.assertTrue(isinstance(factory.content_query_manager(), ContentQueryManager))
        self.assertTrue(isinstance(factory.content_upload_manager(), ContentUploadManager))
        self.assertTrue(isinstance(factory.consumer_manager(), ConsumerManager))
        self.assertTrue(isinstance(factory.topic_publish_manager(), TopicPublishManager))
Example #15
    def regenerate_applicability_for_repos(repo_criteria):
        """
        Regenerate and save applicability data affected by given updated repositories.

        :param repo_criteria: The repo selection criteria
        :type repo_criteria: dict
        """
        repo_criteria = Criteria.from_dict(repo_criteria)
        repo_query_manager = managers.repo_query_manager()

        # Process repo criteria
        repo_criteria.fields = ["id"]
        repo_ids = [r["id"] for r in repo_query_manager.find_by_criteria(repo_criteria)]

        for repo_id in repo_ids:
            # Find all existing applicabilities for given repo_id
            existing_applicabilities = RepoProfileApplicability.get_collection().find({"repo_id": repo_id})
            for existing_applicability in existing_applicabilities:
                # Convert cursor to RepoProfileApplicability object
                existing_applicability = RepoProfileApplicability(**dict(existing_applicability))
                profile_hash = existing_applicability["profile_hash"]
                unit_profile = UnitProfile.get_collection().find_one(
                    {"profile_hash": profile_hash}, fields=["id", "content_type"]
                )
                # Regenerate applicability data for given unit_profile and repo id
                ApplicabilityRegenerationManager.regenerate_applicability(
                    profile_hash, unit_profile["content_type"], unit_profile["id"], repo_id, existing_applicability
                )
Example #16
    def GET(self, id):
        """
        Looks for query parameters 'importers' and 'distributors', and will add
        the corresponding fields to the repository returned. Query parameter
        'details' is equivalent to passing both 'importers' and 'distributors'.
        """
        query_params = web.input()
        query_manager = manager_factory.repo_query_manager()
        repo = query_manager.find_by_id(id)

        if repo is None:
            raise exceptions.MissingResource(repo=id)

        repo.update(serialization.link.current_link_obj())
        _convert_repo_dates_to_strings(repo)

        if query_params.get('details', False):
            query_params['importers'] = True
            query_params['distributors'] = True

        if query_params.get('importers', False):
            repo = _merge_related_objects(
                'importers', manager_factory.repo_importer_manager(),
                (repo, ))[0]
        if query_params.get('distributors', False):
            repo = _merge_related_objects(
                'distributors', manager_factory.repo_distributor_manager(),
                (repo, ))[0]

        return self.ok(repo)
Example #17
def verify_group_resources(group_id, repo_id, distributor_id):
    """
    Confirm the group, repository, and distributor exist.

    :param group_id: The consumer group id to verify the existence of
    :type group_id: str
    :param repo_id: The repository id to confirm the existence of
    :type repo_id: str
    :param distributor_id: The distributor id to confirm the existence of on the repository
    :type distributor_id: str
    :return: A dictionary of the missing resources
    :rtype: dict
    """
    missing_resources = {}
    group_manager = factory.consumer_group_query_manager()
    repo_manager = factory.repo_query_manager()
    distributor_manager = factory.repo_distributor_manager()
    try:
        group_manager.get_group(group_id)
    except pulp_exceptions.MissingResource:
        missing_resources['group_id'] = group_id
    repo = repo_manager.find_by_id(repo_id)
    if repo is None:
        missing_resources['repo_id'] = repo_id
    try:
        distributor_manager.get_distributor(repo_id, distributor_id)
    except pulp_exceptions.MissingResource:
        missing_resources['distributor_id'] = distributor_id
    return missing_resources
Example #18
def remove_from_importer(repo_id, removed_units):

    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Convert the units into transfer units
    unit_type_ids = calculate_associated_type_ids(repo_id, removed_units)
    transfer_units = create_transfer_units(removed_units, unit_type_ids)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units, call_config)
    except Exception:
        _LOG.exception('Exception from importer [%s] while removing units from repo [%s]' % (repo_importer['id'], repo_id))
Example #19
    def GET(self, id):
        """
        Looks for query parameters 'importers' and 'distributors', and will add
        the corresponding fields to the repository returned. Query parameter
        'details' is equivalent to passing both 'importers' and 'distributors'.
        """
        query_params = web.input()
        query_manager = manager_factory.repo_query_manager()
        repo = query_manager.find_by_id(id)

        if repo is None:
            raise exceptions.MissingResource(id)

        repo.update(serialization.link.current_link_obj())

        if query_params.get('details', False):
            query_params['importers'] = True
            query_params['distributors'] = True

        if query_params.get('importers', False):
            repo = _merge_related_objects('importers', manager_factory.repo_importer_manager(), (repo,))[0]
        if query_params.get('distributors', False):
            repo = _merge_related_objects('distributors', manager_factory.repo_distributor_manager(), (repo,))[0]

        return self.ok(repo)
Example #20
def remove_from_importer(repo_id, transfer_units):

    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(repo_importer['importer_type_id'],
                                                                  repo_id, mkdir=True)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units, call_config)
    except Exception:
        msg = _('Exception from importer [%(i)s] while removing units from repo [%(r)s]')
        msg = msg % {'i': repo_importer['id'], 'r': repo_id}
        logger.exception(msg)
Example #21
    def test_syntactic_sugar_methods(self):
        """
        Tests the syntactic sugar methods for retrieving specific managers.
        """
        # Setup
        factory.initialize()

        # Test
        self.assertTrue(isinstance(factory.authentication_manager(), AuthenticationManager))
        self.assertTrue(isinstance(factory.cert_generation_manager(), CertGenerationManager))
        self.assertTrue(isinstance(factory.certificate_manager(), CertificateManager))
        self.assertTrue(isinstance(factory.password_manager(), PasswordManager))
        self.assertTrue(isinstance(factory.permission_manager(), PermissionManager))
        self.assertTrue(isinstance(factory.permission_query_manager(), PermissionQueryManager))
        self.assertTrue(isinstance(factory.role_manager(), RoleManager))
        self.assertTrue(isinstance(factory.role_query_manager(), RoleQueryManager))
        self.assertTrue(isinstance(factory.user_manager(), UserManager))
        self.assertTrue(isinstance(factory.user_query_manager(), UserQueryManager))
        self.assertTrue(isinstance(factory.repo_manager(), RepoManager))
        self.assertTrue(isinstance(factory.repo_unit_association_manager(),
                                   RepoUnitAssociationManager))
        self.assertTrue(isinstance(factory.repo_publish_manager(), RepoPublishManager))
        self.assertTrue(isinstance(factory.repo_query_manager(), RepoQueryManager))
        self.assertTrue(isinstance(factory.repo_sync_manager(), RepoSyncManager))
        self.assertTrue(isinstance(factory.content_manager(), ContentManager))
        self.assertTrue(isinstance(factory.content_query_manager(), ContentQueryManager))
        self.assertTrue(isinstance(factory.content_upload_manager(), ContentUploadManager))
        self.assertTrue(isinstance(factory.consumer_manager(), ConsumerManager))
        self.assertTrue(isinstance(factory.topic_publish_manager(), TopicPublishManager))
Example #22
    def POST(self, repo_id):
        # Params
        params = self.params()
        query = params.get('criteria', None)

        repo_query_manager = manager_factory.repo_query_manager()
        repo = repo_query_manager.find_by_id(repo_id)
        if repo is None:
            raise exceptions.MissingResource(repo_id=repo_id)

        if query is None:
            raise exceptions.MissingValue(['criteria'])

        try:
            criteria = UnitAssociationCriteria.from_client_input(query)
        except:
            _logger.error('Error parsing association criteria [%s]' % query)
            raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        # Data lookup
        manager = manager_factory.repo_unit_association_query_manager()
        if criteria.type_ids is not None and len(criteria.type_ids) == 1:
            type_id = criteria.type_ids[0]
            units = manager.get_units_by_type(repo_id,
                                              type_id,
                                              criteria=criteria)
        else:
            units = manager.get_units_across_types(repo_id, criteria=criteria)

        return self.ok(units)
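The handler requires a 'criteria' key in the POST body and passes it to UnitAssociationCriteria.from_client_input. A hedged sketch of such a body follows; the field names mirror Pulp's association-criteria conventions, and the concrete values are invented.

    # Assumed shape of the POST body consumed by the handler above.
    body = {
        'criteria': {
            'type_ids': ['rpm'],                      # limit the search to one unit type
            'filters': {'unit': {'name': 'zebra'}},   # Mongo-style filter on unit metadata
            'limit': 20,
        }
    }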
Example #23
 def create_repo_group(group_id,
                       display_name=None,
                       description=None,
                       repo_ids=None,
                       notes=None):
     """
     Create a new repo group.
     :param group_id: unique id of the repo group
     :param display_name: display name of the repo group
     :type  display_name: str or None
     :param description: description of the repo group
     :type  description: str or None
     :param repo_ids: list of ids for repos initially belonging to the repo group
     :type  repo_ids: list or None
     :param notes: notes for the repo group
     :type  notes: dict or None
     :return: SON representation of the repo group
     :rtype: bson.SON
     """
     if repo_ids:
         # Check if ids in repo_ids belong to existing repositories
         repo_query_manager = manager_factory.repo_query_manager()
         for repo_id in repo_ids:
             repo_query_manager.get_repository(repo_id)
     # Create repo group
     collection = RepoGroup.get_collection()
     repo_group = RepoGroup(group_id, display_name, description, repo_ids,
                            notes)
     try:
         collection.insert(repo_group, safe=True)
     except DuplicateKeyError:
         raise pulp_exceptions.DuplicateResource(
             group_id), None, sys.exc_info()[2]
     group = collection.find_one({'id': group_id})
     return group
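A short usage sketch for create_repo_group; all ids are placeholders, and any repository listed in repo_ids must already exist because the function calls get_repository() on each one.

    # Hypothetical call with invented ids.
    group = create_repo_group(
        'release-repos',
        display_name='Release repositories',
        description='Repos that feed the release channel',
        repo_ids=['zoo-repo'],
        notes={'team': 'release-eng'},
    )
    print(group['id'])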
Example #24
    def regenerate_applicability_for_repos(self, repo_criteria=None):
        """
        Regenerate and save applicability data affected by given updated repositories.

        :param repo_criteria: The repo selection criteria
        :type repo_criteria: pulp.server.db.model.criteria.Criteria
        """
        repo_query_manager = managers.repo_query_manager()

        # Process repo criteria
        repo_criteria.fields = ['id']
        repo_ids = [r['id'] for r in repo_query_manager.find_by_criteria(repo_criteria)]

        for repo_id in repo_ids:
            # Find all existing applicabilities for given repo_id
            existing_applicabilities = RepoProfileApplicability.get_collection().find({'repo_id':repo_id})
            for existing_applicability in existing_applicabilities:
                # Convert cursor to RepoProfileApplicability object
                existing_applicability = RepoProfileApplicability(**dict(existing_applicability))
                profile_hash = existing_applicability['profile_hash']
                unit_profile = UnitProfile.get_collection().find_one({'profile_hash': profile_hash},
                                                                     fields=['id','content_type'])
                # Regenerate applicability data for given unit_profile and repo id
                self.regenerate_applicability(profile_hash, unit_profile['content_type'],
                                              unit_profile['id'],
                                              repo_id,
                                              existing_applicability)
Example #25
File: cud.py Project: beav/pulp
 def create_repo_group(group_id, display_name=None, description=None, repo_ids=None, notes=None):
     """
     Create a new repo group.
     :param group_id: unique id of the repo group
     :param display_name: display name of the repo group
     :type  display_name: str or None
     :param description: description of the repo group
     :type  description: str or None
     :param repo_ids: list of ids for repos initially belonging to the repo group
     :type  repo_ids: list or None
     :param notes: notes for the repo group
     :type  notes: dict or None
     :return: SON representation of the repo group
     :rtype: bson.SON
     """
     if repo_ids:
         # Check if ids in repo_ids belong to existing repositories
         repo_query_manager = manager_factory.repo_query_manager()
         for repo_id in repo_ids:
             repo_query_manager.get_repository(repo_id)
     # Create repo group
     collection = RepoGroup.get_collection()
     repo_group = RepoGroup(group_id, display_name, description, repo_ids, notes)
     try:
         collection.insert(repo_group, safe=True)
     except DuplicateKeyError:
         raise pulp_exceptions.DuplicateResource(group_id), None, sys.exc_info()[2]
     group = collection.find_one({'id': group_id})
     return group
Example #26
def verify_group_resources(group_id, repo_id, distributor_id):
    """
    Confirm the group, repository, and distributor exist.

    :param group_id: The consumer group id to verify the existence of
    :type group_id: str
    :param repo_id: The repository id to confirm the existence of
    :type repo_id: str
    :param distributor_id: The distributor id to confirm the existence of on the repository
    :type distributor_id: str
    :return: A dictionary of the missing resources
    :rtype: dict
    """
    missing_resources = {}
    group_manager = factory.consumer_group_query_manager()
    repo_manager = factory.repo_query_manager()
    distributor_manager = factory.repo_distributor_manager()
    try:
        group_manager.get_group(group_id)
    except pulp_exceptions.MissingResource:
        missing_resources['group_id'] = group_id
    repo = repo_manager.find_by_id(repo_id)
    if repo is None:
        missing_resources['repo_id'] = repo_id
    try:
        distributor_manager.get_distributor(repo_id, distributor_id)
    except pulp_exceptions.MissingResource:
        missing_resources['distributor_id'] = distributor_id
    return missing_resources
Example #27
 def verify(self, num_units=PluginTestBase.NUM_UNITS):
     # repository
     manager = managers.repo_query_manager()
     manager.get_repository(self.REPO_ID)
     # importer
     manager = managers.repo_importer_manager()
     importer = manager.get_importer(self.REPO_ID)
     manifest_url = importer['config'][constants.MANIFEST_URL_KEYWORD]
     self.assertTrue(manifest_url.endswith('%s/manifest.json.gz' % self.REPO_ID))
     # distributor
     manager = managers.repo_distributor_manager()
     manager.get_distributor(self.REPO_ID, FAKE_DISTRIBUTOR)
     self.assertRaises(MissingResource, manager.get_distributor, self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     # check units
     manager = managers.repo_unit_association_query_manager()
     units = manager.get_units(self.REPO_ID)
     units = dict([(u['metadata']['N'], u) for u in units])
     self.assertEqual(len(units), num_units)
     for n in range(0, num_units):
         unit = units[n]
         unit_id = self.UNIT_ID % n
         metadata = unit['metadata']
         storage_path = metadata['_storage_path'].replace('//', '/')
         self.assertEqual(unit['unit_type_id'], self.UNIT_TYPE_ID)
         self.assertEqual(unit['repo_id'], self.REPO_ID)
         self.assertEqual(unit['owner_id'], constants.HTTP_IMPORTER)
         file_path = '.'.join((unit_id, self.UNIT_TYPE_ID))
         self.assertEqual(storage_path, os.path.join(self.childfs, 'content', file_path))
         self.assertTrue(os.path.exists(storage_path))
         fp = open(storage_path)
         content = fp.read()
         fp.close()
         self.assertEqual(content, unit_id)
Example #28
    def GET(self, consumer_id, repo_id=None):
        """
        Fetch all bind objects referencing the specified consumer_id. Optionally,
        specify a repo_id to fetch all bind objects for the consumer_id to the repo_id

        :param consumer_id: The specified consumer.
        :type  consumer_id: str
        :param repo_id:     The repository to retrieve bindings for (optional)
        :type  repo_id:     str

        :return: A list of dictionaries that represent pulp.server.db.model.consumer.Bind objects
        :rtype:  list
        """
        # Check to make sure the resources exist
        missing_resources = {}
        if repo_id is not None:
            repo = managers.repo_query_manager().find_by_id(repo_id)
            if repo is None:
                missing_resources['repo_id'] = repo_id
        # If get_consumer raises MissingResource we might miss reporting a bad repo_id
        try:
            managers.consumer_manager().get_consumer(consumer_id)
        except MissingResource:
            missing_resources['consumer_id'] = consumer_id

        if len(missing_resources) > 0:
            raise MissingResource(**missing_resources)

        manager = managers.consumer_bind_manager()
        bindings = manager.find_by_consumer(consumer_id, repo_id)
        bindings = [serialization.binding.serialize(b) for b in bindings]
        return self.ok(bindings)
Example #29
def remove_from_importer(repo_id, transfer_units):
    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config,
                                          repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units,
                                       call_config)
    except Exception:
        msg = _(
            'Exception from importer [%(i)s] while removing units from repo [%(r)s]'
        )
        msg = msg % {'i': repo_importer['id'], 'r': repo_id}
        logger.exception(msg)
Example #30
    def POST(self, repo_id):
        # Params
        params = self.params()
        query = params.get('criteria', None)

        repo_query_manager = manager_factory.repo_query_manager()
        repo = repo_query_manager.find_by_id(repo_id)
        if repo is None:
            raise exceptions.MissingResource(repo_id=repo_id)

        if query is None:
            raise exceptions.MissingValue(['criteria'])

        try:
            criteria = UnitAssociationCriteria.from_client_input(query)
        except:
            _LOG.exception('Error parsing association criteria [%s]' % query)
            raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        # Data lookup
        manager = manager_factory.repo_unit_association_query_manager()
        if criteria.type_ids is not None and len(criteria.type_ids) == 1:
            type_id = criteria.type_ids[0]
            units = manager.get_units_by_type(repo_id, type_id, criteria=criteria)
        else:
            units = manager.get_units_across_types(repo_id, criteria=criteria)

        return self.ok(units)
Example #31
    def resolve_dependencies_by_units(repo_id, units, options):
        """
        Calculates dependencies for the given set of units in the given
        repository.

        :param repo_id:         identifies the repository
        :type  repo_id:         str
        :param units:           list of database representations of units to resolve dependencies
                                for
        :type  units:           list
        :param options:         dict of options to pass the importer to drive the resolution
        :type  options:         dict or None
        :return:                report from the plugin
        :rtype:                 object
        :raise MissingResource: if the repo does not exist or does not have an importer
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        # The following will raise MissingResource as appropriate
        repo = repo_query_manager.get_repository(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Package for the importer call
        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], options)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            repo_importer['importer_type_id'], repo_id, mkdir=True)

        conduit = DependencyResolutionConduit(repo_id, repo_importer['id'])

        # Convert all of the units into the plugin standard representation
        transfer_units = []

        # Preload all the type defs so we don't hammer the database unnecessarily
        type_defs = {}
        all_type_def_ids = set([u['unit_type_id'] for u in units])
        for def_id in all_type_def_ids:
            type_def = types_db.type_definition(def_id)
            type_defs[def_id] = type_def

        for unit in units:
            type_id = unit['unit_type_id']
            u = conduit_common_utils.to_plugin_associated_unit(unit, type_defs[type_id])
            transfer_units.append(u)

        # Invoke the importer
        try:
            dep_report = importer_instance.resolve_dependencies(transfer_repo, transfer_units,
                                                                conduit, call_config)
        except Exception, e:
            raise PulpExecutionException(), None, sys.exc_info()[2]
Example #32
    def resolve_dependencies_by_units(repo_id, units, options):
        """
        Calculates dependencies for the given set of units in the given
        repository.

        :param repo_id:         identifies the repository
        :type  repo_id:         str
        :param units:           list of database representations of units to resolve dependencies
                                for
        :type  units:           list
        :param options:         dict of options to pass the importer to drive the resolution
        :type  options:         dict or None
        :return:                report from the plugin
        :rtype:                 object
        :raise MissingResource: if the repo does not exist or does not have an importer
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        # The following will raise MissingResource as appropriate
        repo = repo_query_manager.get_repository(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Package for the importer call
        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], options)
        transfer_repo = common_utils.to_transfer_repo(repo)

        conduit = DependencyResolutionConduit(repo_id, repo_importer['id'])

        # Convert all of the units into the plugin standard representation
        transfer_units = []

        # Preload all the type defs so we don't hammer the database unnecessarily
        type_defs = {}
        all_type_def_ids = set([u['unit_type_id'] for u in units])
        for def_id in all_type_def_ids:
            type_def = types_db.type_definition(def_id)
            type_defs[def_id] = type_def

        for unit in units:
            type_id = unit['unit_type_id']
            u = conduit_common_utils.to_plugin_associated_unit(unit, type_defs[type_id])
            transfer_units.append(u)

        # Invoke the importer
        try:
            dep_report = importer_instance.resolve_dependencies(transfer_repo, transfer_units,
                                                                conduit, call_config)
        except Exception:
            raise PulpExecutionException(), None, sys.exc_info()[2]

        return dep_report
Example #33
    def setUp(self):
        super(RepoQueryManagerTests, self).setUp()
        mock_plugins.install()

        self.repo_manager = manager_factory.repo_manager()
        self.importer_manager = manager_factory.repo_importer_manager()
        self.distributor_manager = manager_factory.repo_distributor_manager()
        self.query_manager = manager_factory.repo_query_manager()
Example #34
    def setUp(self):
        super(RepoQueryManagerTests, self).setUp()
        mock_plugins.install()

        self.repo_manager = manager_factory.repo_manager()
        self.importer_manager = manager_factory.repo_importer_manager()
        self.distributor_manager = manager_factory.repo_distributor_manager()
        self.query_manager = manager_factory.repo_query_manager()
Example #35
    def import_uploaded_unit(self, repo_id, unit_type_id, unit_key, unit_metadata, upload_id):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        @param repo_id: identifies the repository into which the unit is uploaded
        @type  repo_id: str

        @param unit_type_id: type of unit being uploaded
        @type  unit_type_id: str

        @param unit_key: unique identifier for the unit (user-specified)
        @type  unit_key: dict

        @param unit_metadata: any user-specified information about the unit
        @type  unit_metadata: dict

        @param upload_id: upload being imported
        @type  upload_id: str
        """

        # If it doesn't raise an exception, it's good to go
        self.is_valid_upload(repo_id, unit_type_id)

        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        repo = repo_query_manager.find_by_id(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER, pulp_principal.get_principal()['login'])

        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], None)
        transfer_repo = repo_common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = repo_common_utils.importer_working_dir(repo_importer['importer_type_id'], repo_id, mkdir=True)

        file_path = self._upload_file_path(upload_id)

        # Invoke the importer
        try:
            # def upload_unit(self, type_id, unit_key, metadata, file_path, conduit, config):
            report = importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key, unit_metadata, file_path, conduit, call_config)
        except Exception, e:
            _LOG.exception('Error from the importer while importing uploaded unit to repository [%s]' % repo_id)
            raise PulpExecutionException(e), None, sys.exc_info()[2]
Example #36
    def POST(self, repo_id):

        # TODO: Add timeout support

        # Params
        params = self.params()
        overrides = params.get('override_config', None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)

        # Execute the sync asynchronously

        call_requests = sync_with_auto_publish_itinerary(repo_id, overrides)

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        execution.execute_multiple(call_requests)
Example #37
    def post(self, request, repo_id):
        """
        Dispatch a task to sync a repository.

        :param request: WSGI request object
        :type  request: django.core.handlers.wsgi.WSGIRequest
        :param repo_id: id of the repository to sync
        :type  repo_id: str

        :raises pulp_exceptions.OperationPostponed: dispatch a sync repo task
        """

        overrides = request.body_as_json.get('override_config', None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)
        async_result = repo_tasks.sync_with_auto_publish(repo_id, overrides)
        raise pulp_exceptions.OperationPostponed(async_result)
Example #38
    def POST(self, repo_id):

        # Params
        params = self.params()
        overrides = params.get("override_config", None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)

        # Execute the sync asynchronously
        tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id), action_tag("sync")]
        async_result = repository.sync_with_auto_publish.apply_async_with_reservation(
            dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id, [repo_id, overrides], {}, tags=tags
        )

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        raise exceptions.OperationPostponed(async_result)
Example #39
    def POST(self, repo_id):

        # TODO: Add timeout support

        # Params
        params = self.params()
        overrides = params.get('override_config', None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)

        # Execute the sync asynchronously

        call_requests = sync_with_auto_publish_itinerary(repo_id, overrides)

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        execution.execute_multiple(call_requests)
Example #40
 def _add_repository(self, repo_id, payload):
     """
     Add repository information to the payload.
     :param repo_id: The repository ID.
     :type repo_id: str
     :param payload: The repository payload
     :type payload: dict
     """
     manager = factory.repo_query_manager()
     payload['repository'] = manager.get_repository(repo_id)
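A brief sketch of how _add_repository might be driven; 'handler' stands in for whatever object defines the method above, and the repo id is a placeholder.

    # Hypothetical usage of the helper shown above.
    payload = {}
    handler._add_repository('zoo-repo', payload)
    # payload['repository'] now holds the document returned by
    # factory.repo_query_manager().get_repository('zoo-repo')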
Example #41
 def _add_repository(self, repo_id, payload):
     """
     Add repository information to the payload.
     :param repo_id: The repository ID.
     :type repo_id: str
     :param payload: The repository payload
     :type payload: dict
     """
     manager = factory.repo_query_manager()
     payload['repository'] = manager.get_repository(repo_id)
Example #42
def _get_repos():
    """
     Looks up all the yum-based repos in Pulp.
     @return: a list of repo ids
    """
    repos = factory.repo_query_manager().find_with_importer_type("yum_importer")
    if not repos:
        _log.debug("No repos found to perform db migrate")
        return []
    repo_ids = [repo['id'] for repo in repos]
    return repo_ids
Example #43
    def POST(self, repo_id):
        # validation
        manager = manager_factory.repo_query_manager()
        manager.get_repository(repo_id)

        # Params
        params = self.params()
        distributor_id = params.get('id', None)
        overrides = params.get('override_config', None)
        async_result = repository.publish(repo_id, distributor_id, overrides)
        raise exceptions.OperationPostponed(async_result)
Example #44
    def POST(self, repo_id):
        # validation
        manager = manager_factory.repo_query_manager()
        manager.get_repository(repo_id)

        # Params
        params = self.params()
        distributor_id = params.get('id', None)
        overrides = params.get('override_config', None)
        async_result = repository.publish(repo_id, distributor_id, overrides)
        raise exceptions.OperationPostponed(async_result)
Example #45
    def unassociate_all_by_ids(self, repo_id, unit_type_id, unit_id_list, owner_type, owner_id):
        """
        Removes the association between a repo and a number of units. Only the
        association made by the given owner will be removed. It is possible the
        repo will still have a manually created association for the unit.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param unit_type_id: identifies the type of units being removed
        @type  unit_type_id: str

        @param unit_id_list: list of unique identifiers for units within the given type
        @type  unit_id_list: list of str

        @param owner_type: category of the caller who created the association;
                           must be one of the OWNER_* variables in this module
        @type  owner_type: str

        @param owner_id: identifies the caller who created the association, either
                         the importer ID or user login
        @type  owner_id: str
        """
        spec = {'repo_id' : repo_id,
                'unit_type_id' : unit_type_id,
                'unit_id' : {'$in' : unit_id_list},
                'owner_type' : owner_type,
                'owner_id' : owner_id,
                }

        unit_coll = RepoContentUnit.get_collection()
        unit_coll.remove(spec, safe=True)

        unique_count = sum(1 for unit_id in unit_id_list
            if not self.association_exists(repo_id, unit_id, unit_type_id))

        # update the count of associated units on the repo object
        if unique_count:
            manager_factory.repo_manager().update_unit_count(
                repo_id, -unique_count)

        try:
            repo_query_manager = manager_factory.repo_query_manager()
            repo = repo_query_manager.get_repository(repo_id)

            content_query_manager = manager_factory.content_query_manager()
            content_units = content_query_manager.get_multiple_units_by_ids(unit_type_id, unit_id_list)

            importer_manager = manager_factory.repo_importer_manager()
            importer = importer_manager.get_importer(repo_id)

            importer.remove_units(repo, content_units)
        except:
            _LOG.exception('Exception informing importer for [%s] of unassociation' % repo_id)
Example #46
def _get_repos():
    """
     Looks up all the yum-based repos in Pulp.
     @return: a list of repo ids
    """
    repos = factory.repo_query_manager().find_with_importer_type(
        "yum_importer")
    if not repos:
        _log.debug("No repos found to perform db migrate")
        return []
    repo_ids = [repo['id'] for repo in repos]
    return repo_ids
Example #47
    def POST(self, dest_repo_id):

        # Params
        params = self.params()
        source_repo_id = params.get('source_repo_id', None)
        overrides = params.get('override_config', None)

        if source_repo_id is None:
            raise exceptions.MissingValue(['source_repo_id'])

        # A 404 only applies to things in the URL, so the destination repo
        # check allows the MissingResource to bubble up, but if the source
        # repo doesn't exist, it's considered bad data.
        repo_query_manager = manager_factory.repo_query_manager()
        repo_query_manager.get_repository(dest_repo_id)

        try:
            repo_query_manager.get_repository(source_repo_id)
        except exceptions.MissingResource:
            raise exceptions.InvalidValue(['source_repo_id'])

        criteria = params.get('criteria', None)
        if criteria is not None:
            try:
                criteria = UnitAssociationCriteria.from_client_input(criteria)
            except:
                _LOG.error('Error parsing association criteria [%s]' %
                           criteria)
                raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        association_manager = manager_factory.repo_unit_association_manager()
        tags = [
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE,
                         dest_repo_id),
            resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE,
                         source_repo_id),
            action_tag('associate')
        ]
        call_request = CallRequest(
            association_manager.associate_from_repo,
            [source_repo_id, dest_repo_id], {
                'criteria': criteria,
                'import_config_override': overrides
            },
            tags=tags,
            archive=True,
            kwarg_blacklist=['criteria', 'import_config_override'])
        call_request.reads_resource(
            dispatch_constants.RESOURCE_REPOSITORY_TYPE, source_repo_id)
        call_request.updates_resource(
            dispatch_constants.RESOURCE_REPOSITORY_TYPE, dest_repo_id)
        return execution.execute_async(self, call_request)
Example #48
def preserve_custom_metadata_on_repo_scratchpad():
    """
     Looks up all the yum-based repos in Pulp, grabs any custom metadata,
     and sets the data on the repo scratchpad.
    """
    factory.initialize()
    repos = factory.repo_query_manager().find_with_importer_type(
        "yum_importer")
    if not repos:
        _log.debug("No repos found to perform db migrate")
        return
    repo_ids = [repo['id'] for repo in repos]
    for repo_id in repo_ids:
        _log.debug("Processing repo %s" % repo_id)
        repo_scratchpad = factory.repo_manager().get_repo_scratchpad(repo_id)
        if "repodata" in repo_scratchpad and repo_scratchpad["repodata"]:
            # repo scratchpad already has repodata, skip migration
            _log.debug(
                "repo [%s] scratchpad already has repodata, skip migration" %
                repo_id)
            continue
        repo_working_dir = importer_working_dir('yum_importer', repo_id)
        importer_repodata_dir = os.path.join(repo_working_dir, repo_id,
                                             "repodata")
        repomd_xml_path = os.path.join(importer_repodata_dir, "repomd.xml")
        if not os.path.exists(repomd_xml_path):
            # repodata doesn't exist on filesystem cannot lookup custom data, continue to next
            continue
        ftypes = util.get_repomd_filetypes(repomd_xml_path)
        base_ftypes = [
            'primary', 'primary_db', 'filelists_db', 'filelists', 'other',
            'other_db', 'group', 'group_gz', 'updateinfo', 'updateinfo_db'
        ]
        for ftype in ftypes:
            if ftype in base_ftypes:
                # no need to process these again
                continue
            filetype_path = os.path.join(
                importer_repodata_dir,
                os.path.basename(
                    util.get_repomd_filetype_path(repomd_xml_path, ftype)))
            if filetype_path.endswith('.gz'):
                # if file is gzipped, decompress
                data = gzip.open(filetype_path).read().decode(
                    "utf-8", "replace")
            else:
                data = open(filetype_path).read().decode("utf-8", "replace")
            repo_scratchpad["repodata"].update({ftype: data})
        # set the custom metadata on scratchpad
        factory.repo_manager().set_repo_scratchpad(repo_id, repo_scratchpad)
        _log.info("Updated repo [%s] scratchpad with new custom repodata" %
                  repo_id)
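The gzip-or-plain read above is the only fiddly I/O step; as a minimal standalone sketch (hypothetical helper name, standard library only) it could be written as:

import gzip

def read_repodata_file(path):
    # Hypothetical helper mirroring the read logic above: transparently handle
    # gzip-compressed repodata files and decode their contents leniently.
    opener = gzip.open if path.endswith('.gz') else open
    with opener(path, 'rb') as fp:
        return fp.read().decode('utf-8', 'replace')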
Beispiel #49
0
    def POST(self, repo_id):

        # Params
        params = self.params()
        overrides = params.get('override_config', None)

        # Check for repo existence and let the missing resource bubble up
        manager_factory.repo_query_manager().get_repository(repo_id)

        # Execute the sync asynchronously
        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, repo_id),
            tags.action_tag('sync')
        ]
        async_result = repository.sync_with_auto_publish.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE,
            repo_id, [repo_id, overrides], {},
            tags=task_tags)

        # this raises an exception that is handled by the middleware,
        # so no return is needed
        raise exceptions.OperationPostponed(async_result)
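The only field this handler reads from the POST body is override_config, so a client request reduces to something like the sketch below. The URL path, credentials, and the use of the requests library are assumptions for illustration; a successful call is answered with the postponed-operation response raised via OperationPostponed.

import requests

# Hypothetical client call -- only the 'override_config' key comes from the
# handler above; the URL path, auth, and override values are assumptions.
body = {'override_config': {'num_threads': 2}}
requests.post('https://pulp.example.com/pulp/api/v2/repositories/my-repo/actions/sync/',
              json=body, auth=('admin', 'admin'), verify=False)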
Beispiel #50
0
    def unassociate_by_criteria(self, repo_id, criteria, owner_type, owner_id):
        """
        Unassociate units that are matched by the given criteria.
        @param repo_id: identifies the repo
        @type repo_id: str
        @param criteria: criteria used to select the units to unassociate
        @type criteria: UnitAssociationCriteria
        @param owner_type: category of the caller who created the association
        @type owner_type: str
        @param owner_id: identifies the caller who created the association
        @type owner_id: str
        """
        association_query_manager = manager_factory.repo_unit_association_query_manager()
        unassociate_units = association_query_manager.get_units(repo_id, criteria=criteria)

        if len(unassociate_units) == 0:
            return

        unit_map = {} # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit['unit_type_id'], [])
            id_list.append(unit['unit_id'])

        collection = RepoContentUnit.get_collection()
        repo_manager = manager_factory.repo_manager()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {'repo_id': repo_id,
                    'unit_type_id': unit_type_id,
                    'unit_id': {'$in': unit_ids},
                    'owner_type': owner_type,
                    'owner_id': owner_id}
            collection.remove(spec, safe=True)

            unique_count = sum(1 for unit_id in unit_ids
                               if not self.association_exists(repo_id, unit_id, unit_type_id))
            if not unique_count:
                continue

            repo_manager.update_unit_count(repo_id, -unique_count)

        try:
            repo_query_manager = manager_factory.repo_query_manager()
            repo = repo_query_manager.get_repository(repo_id)

            importer_manager = manager_factory.repo_importer_manager()
            importer = importer_manager.get_importer(repo_id)

            importer.remove_units(repo, unassociate_units)
        except:
            _LOG.exception('Exception informing importer for [%s] of unassociation' % repo_id)
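The setdefault grouping in the middle of this method is compact but easy to misread; with made-up association records it behaves as in this standalone sketch:

# Illustrative data only -- grouping association records by unit_type_id,
# mirroring the setdefault loop in unassociate_by_criteria.
unassociate_units = [
    {'unit_type_id': 'rpm', 'unit_id': 'u1'},
    {'unit_type_id': 'rpm', 'unit_id': 'u2'},
    {'unit_type_id': 'erratum', 'unit_id': 'u3'},
]
unit_map = {}
for unit in unassociate_units:
    unit_map.setdefault(unit['unit_type_id'], []).append(unit['unit_id'])
# unit_map == {'rpm': ['u1', 'u2'], 'erratum': ['u3']}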
Beispiel #51
0
    def POST(self, repo_id):

        # validation
        manager = manager_factory.repo_query_manager()
        manager.get_repository(repo_id)

        # Params
        params = self.params()
        distributor_id = params.get('id', None)
        overrides = params.get('override_config', None)

        call_request = publish_itinerary(repo_id, distributor_id, overrides)[0]

        return execution.execute_async(self, call_request)
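Based only on the two params this handler reads ('id' for the distributor and 'override_config'), a sketch of the expected POST body; the values themselves are hypothetical.

# Field names come from the params read above; the values are made up.
publish_body = {
    'id': 'yum_distributor',            # distributor to publish with
    'override_config': {'http': True},  # per-call overrides (hypothetical keys)
}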
Beispiel #52
0
    def _get_existing_repo_content_types(repo_id):
        """
        For the given repo_id, return a list of content_type_ids that have content unit counts greater than 0.

        :param repo_id: The id of the repository whose contained unit types we wish to know
        :type  repo_id: basestring
        :return:        A list of content type ids that have unit counts greater than 0
        :rtype:         list
        """
        repo_content_types_with_non_zero_unit_count = []
        repo = managers.repo_query_manager().find_by_id(repo_id)
        if repo:
            for content_type, count in repo['content_unit_counts'].items():
                if count > 0:
                    repo_content_types_with_non_zero_unit_count.append(
                        content_type)
        return repo_content_types_with_non_zero_unit_count
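A worked example of the counting logic with a made-up repo document:

# Only content types with a non-zero unit count are returned.
repo = {'content_unit_counts': {'rpm': 12, 'erratum': 0, 'package_group': 3}}
non_zero_types = [content_type for content_type, count in repo['content_unit_counts'].items()
                  if count > 0]
# sorted(non_zero_types) == ['package_group', 'rpm']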
Beispiel #53
0
    def POST(self, dest_repo_id):

        # Params
        params = self.params()
        source_repo_id = params.get('source_repo_id', None)
        overrides = params.get('override_config', None)

        if source_repo_id is None:
            raise exceptions.MissingValue(['source_repo_id'])

        # A 404 only applies to things in the URL, so the destination repo
        # check allows the MissingResource to bubble up, but if the source
        # repo doesn't exist, it's considered bad data.
        repo_query_manager = manager_factory.repo_query_manager()
        repo_query_manager.get_repository(dest_repo_id)

        try:
            repo_query_manager.get_repository(source_repo_id)
        except exceptions.MissingResource:
            raise exceptions.InvalidValue(['source_repo_id'])

        criteria = params.get('criteria', None)
        if criteria is not None:
            try:
                criteria = UnitAssociationCriteria.from_client_input(criteria)
            except:
                _logger.error('Error parsing association criteria [%s]' %
                              criteria)
                raise exceptions.PulpDataException(), None, sys.exc_info()[2]

        task_tags = [
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, dest_repo_id),
            tags.resource_tag(tags.RESOURCE_REPOSITORY_TYPE, source_repo_id),
            tags.action_tag('associate')
        ]
        async_result = associate_from_repo.apply_async_with_reservation(
            tags.RESOURCE_REPOSITORY_TYPE,
            dest_repo_id, [source_repo_id, dest_repo_id], {
                'criteria': criteria,
                'import_config_override': overrides
            },
            tags=task_tags)
        raise exceptions.OperationPostponed(async_result)
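Putting the three fields this handler parses together, a sketch of a client request body. Only 'source_repo_id', 'override_config', and 'criteria' come from the handler above; everything inside them is hypothetical, and the criteria keys follow Pulp's usual conventions rather than anything shown here. Per the validation above, omitting source_repo_id raises MissingValue and pointing it at a nonexistent repo raises InvalidValue.

# Hypothetical request body for the associate call handled above.
associate_body = {
    'source_repo_id': 'upstream-repo',
    'override_config': {},
    'criteria': {'type_ids': ['rpm'],
                 'filters': {'unit': {'name': {'$in': ['bash', 'glibc']}}}},
}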
Beispiel #54
0
    def regenerate_applicability_for_repos(repo_criteria):
        """
        Regenerate and save applicability data affected by given updated repositories.

        :param repo_criteria: The repo selection criteria
        :type repo_criteria: dict
        """
        repo_criteria = Criteria.from_dict(repo_criteria)
        repo_query_manager = managers.repo_query_manager()

        # Process repo criteria
        repo_criteria.fields = ['id']
        repo_ids = [
            r['id'] for r in repo_query_manager.find_by_criteria(repo_criteria)
        ]

        for repo_id in repo_ids:
            # Find all existing applicabilities for given repo_id
            existing_applicabilities = RepoProfileApplicability.get_collection(
            ).find({'repo_id': repo_id})
            for existing_applicability in existing_applicabilities:
                # Convert cursor to RepoProfileApplicability object
                existing_applicability = RepoProfileApplicability(
                    **dict(existing_applicability))
                profile_hash = existing_applicability['profile_hash']
                unit_profile = UnitProfile.get_collection().find_one(
                    {'profile_hash': profile_hash},
                    fields=['id', 'content_type'])
                if unit_profile is None:
                    # Unit profiles change whenever packages are installed or removed on consumers,
                    # and it is possible that existing_applicability references a UnitProfile
                    # that no longer exists. This is harmless, as Pulp has a monthly cleanup task
                    # that will identify these dangling references and remove them.
                    continue

                # Regenerate applicability data for given unit_profile and repo id
                ApplicabilityRegenerationManager.regenerate_applicability(
                    profile_hash, unit_profile['content_type'],
                    unit_profile['id'], repo_id, existing_applicability)
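Since repo_criteria arrives as a plain dict and is rebuilt with Criteria.from_dict, a caller-side sketch looks like the following; the Mongo-style filter syntax is an assumption based on Pulp's criteria conventions, and the repo ids are made up.

# Hypothetical criteria: regenerate applicability data for two repos selected by id.
repo_criteria = {'filters': {'id': {'$in': ['rhel-7-server', 'epel-7']}}}
# ApplicabilityRegenerationManager.regenerate_applicability_for_repos(repo_criteria)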
Beispiel #55
0
    def get(self, request, consumer_id, repo_id=None):
        """
        Fetch all bind objects referencing the specified consumer_id. Optionally,
        specify a repo_id to fetch all bind objects for the consumer_id to the repo_id.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_id: The specified consumer.
        :type  consumer_id: str
        :param repo_id: The repository to retrieve bindings for (optional)
        :type  repo_id: str

        :raises MissingResource: if some resource is missing

        :return: Response representing the bindings
        :rtype: django.http.HttpResponse
        """

        # Check to make sure the resources exist
        missing_resources = {}
        if repo_id is not None:
            repo = factory.repo_query_manager().find_by_id(repo_id)
            if repo is None:
                missing_resources['repo_id'] = repo_id
        # Catch MissingResource from get_consumer so a missing consumer doesn't prevent a bad repo_id from being reported as well
        try:
            factory.consumer_manager().get_consumer(consumer_id)
        except MissingResource:
            missing_resources['consumer_id'] = consumer_id

        if missing_resources:
            raise MissingResource(**missing_resources)

        manager = factory.consumer_bind_manager()
        bindings = manager.find_by_consumer(consumer_id, repo_id)
        bindings = [serialization.binding.serialize(b) for b in bindings]
        return generate_json_response_with_pulp_encoder(bindings)
Beispiel #56
0
    def find_applicable_units(self,
                              consumer_criteria=None,
                              repo_criteria=None,
                              unit_criteria=None,
                              override_config=None):
        """
        Determine and report which of the content units specified by the unit_criteria
        are applicable to consumers specified by the consumer_criteria
        with repos specified by repo_criteria. If consumer_criteria is None,
        all consumers registered to the Pulp server are checked for applicability.
        If repo_criteria is None, all repos bound to the consumer are taken
        into consideration. If unit_criteria contains an empty list for a specific type,
        all units of that type in the repos bound to the consumer
        are taken into consideration.

        :param consumer_criteria: The consumer selection criteria.
        :type consumer_criteria: dict

        :param repo_criteria: The repo selection criteria.
        :type repo_criteria: dict

        :param unit_criteria: A dictionary of type_id : unit selection criteria
        :type unit_criteria: dict
                {<type_id1> : <unit_criteria_for_type_id1>,
                 <type_id2> : <unit_criteria_for_type_id2>}
      
        :param override_config: Additional configuration options to be accepted from user
        :type override_config: dict

        :return: applicability reports dictionary keyed by content type id
        :rtype: dict
        """
        result = {}
        conduit = ProfilerConduit()
        consumer_query_manager = managers.consumer_query_manager()
        bind_manager = managers.consumer_bind_manager()

        # Process Repo Criteria
        if repo_criteria:
            # Get repo ids satisfied by specified repo criteria
            repo_query_manager = managers.repo_query_manager()
            repo_criteria_ids = [
                r['id']
                for r in repo_query_manager.find_by_criteria(repo_criteria)
            ]
            # if repo_criteria is specified and there are no repos satisfying the criteria, return empty result
            if not repo_criteria_ids:
                return result
        else:
            repo_criteria_ids = None

        # Process Consumer Criteria
        if consumer_criteria:
            # Get consumer ids satisfied by specified consumer criteria
            consumer_ids = [
                c['id'] for c in consumer_query_manager.find_by_criteria(
                    consumer_criteria)
            ]
        else:
            if repo_criteria_ids:
                # If repo_criteria is specified, get all the consumers bound to the repos
                # satisfied by repo_criteria
                bind_criteria = Criteria(
                    filters={"repo_id": {
                        "$in": repo_criteria_ids
                    }})
                consumer_ids = [
                    b['consumer_id']
                    for b in bind_manager.find_by_criteria(bind_criteria)
                ]
                # Remove duplicate consumer ids
                consumer_ids = list(set(consumer_ids))
            else:
                # Get all consumer ids registered to the Pulp server
                consumer_ids = [
                    c['id'] for c in consumer_query_manager.find_all()
                ]
        # if there are no relevant consumers, return empty result
        if not consumer_ids:
            return result
        else:
            # Based on the consumers, get all the repos bound to the consumers in consideration
            # and find intersection of repo_criteria_ids and consumer_repo_ids
            bind_criteria = Criteria(
                filters={"consumer_id": {
                    "$in": consumer_ids
                }})
            consumer_repo_ids = [
                b['repo_id']
                for b in bind_manager.find_by_criteria(bind_criteria)
            ]
            if not repo_criteria_ids:
                repo_criteria_ids = list(set(consumer_repo_ids))
            else:
                repo_criteria_ids = list(
                    set(consumer_repo_ids) & set(repo_criteria_ids))
            if not repo_criteria_ids:
                return result

        # Create a dictionary with consumer profile and repo_ids bound to the consumer keyed by consumer id
        consumer_profile_and_repo_ids = {}
        all_relevant_repo_ids = set()
        for consumer_id in consumer_ids:
            # Find repos bound to the consumer in consideration and find an intersection of bound repos to the
            # repos specified by repo_criteria
            consumer_bound_repo_ids = [
                b['repo_id']
                for b in bind_manager.find_by_consumer(consumer_id)
            ]
            consumer_bound_repo_ids = list(set(consumer_bound_repo_ids))
            # If repo_criteria is not specified, use repos bound to the consumer, else take intersection
            # of repos specified in the criteria and repos bound to the consumer.
            if repo_criteria_ids is None:
                repo_ids = consumer_bound_repo_ids
            else:
                repo_ids = list(
                    set(consumer_bound_repo_ids) & set(repo_criteria_ids))

            if repo_ids:
                # Save all eligible repo ids to get relevant plugin unit keys when unit_criteria is not specified
                all_relevant_repo_ids = (all_relevant_repo_ids | set(repo_ids))
                consumer_profile_and_repo_ids[consumer_id] = {
                    'repo_ids': repo_ids
                }
                consumer_profile_and_repo_ids[consumer_id][
                    'profiled_consumer'] = self.__profiled_consumer(
                        consumer_id)

        if not unit_criteria:
            return result

        # Call respective profiler api according to the unit type to check for applicability
        for unit_type_id, criteria in unit_criteria.items():
            # Find a profiler for each type id and find units applicable using that profiler.
            profiler, cfg = self.__profiler(unit_type_id)
            call_config = PluginCallConfiguration(
                plugin_config=cfg,
                repo_plugin_config=None,
                override_config=override_config)
            try:
                report_list = profiler.find_applicable_units(
                    consumer_profile_and_repo_ids, unit_type_id, criteria,
                    call_config, conduit)
            except PulpExecutionException:
                report_list = None

            if report_list is None:
                _LOG.warn(
                    "Profiler for unit type [%s] is not returning applicability reports"
                    % unit_type_id)
            else:
                result[unit_type_id] = report_list

        return result
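A sketch of the three criteria arguments as the docstring describes them; every id and filter value is made up, and the criteria key names follow Pulp's conventions rather than anything this example verifies.

# Hypothetical inputs for find_applicable_units().
consumer_criteria = {'filters': {'id': {'$in': ['consumer-1', 'consumer-2']}}}
repo_criteria = {'filters': {'id': {'$in': ['rhel-7-server']}}}
# An empty list per type id means "consider all units of that type", per the docstring above.
unit_criteria = {'rpm': [], 'erratum': []}
# The result is keyed by content type id, e.g. {'rpm': [<reports>], 'erratum': [<reports>]}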
Beispiel #57
0
    def associate_from_repo(source_repo_id, dest_repo_id, criteria=None,
                            import_config_override=None):
        """
        Creates associations in a repository based on the contents of a source
        repository. Units from the source repository can be filtered by
        specifying a criteria object.

        The destination repository must have an importer that can support
        the types of units being associated. This is done by analyzing the
        unit list and the importer metadata and takes place before the
        destination repository is called.

        Pulp does not actually perform the associations as part of this call.
        The unit list is determined and passed to the destination repository's
        importer. It is the job of the importer to make the associate calls
        back into Pulp where applicable.

        If criteria is None, the effect of this call is to copy the source
        repository's associations into the destination repository.

        :param source_repo_id:         identifies the source repository
        :type  source_repo_id:         str
        :param dest_repo_id:           identifies the destination repository
        :type  dest_repo_id:           str
        :param criteria:               optional; if specified, will filter the units retrieved from
                                       the source repository
        :type  criteria:               UnitAssociationCriteria
        :param import_config_override: optional config containing values to use for this import only
        :type  import_config_override: dict
        :return:                       dict with key 'units_successful' whose
                                       value is a list of the unit keys of the
                                       units that were associated by this operation
        :rtype:                        dict
        :raise MissingResource:        if either of the specified repositories doesn't exist
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        source_repo = repo_query_manager.get_repository(source_repo_id)
        dest_repo = repo_query_manager.get_repository(dest_repo_id)

        # This will raise MissingResource if there isn't one, which is the
        # behavior we want this method to exhibit, so just let it bubble up.
        dest_repo_importer = importer_manager.get_importer(dest_repo_id)
        source_repo_importer = importer_manager.get_importer(source_repo_id)

        # The docs are incorrect on the list_importer_types call; it actually
        # returns a dict with the types under key "types" for some reason.
        supported_type_ids = plugin_api.list_importer_types(
            dest_repo_importer['importer_type_id'])['types']

        # If criteria is specified, retrieve the list of units now
        associate_us = None
        if criteria is not None:
            associate_us = load_associated_units(source_repo_id, criteria)

            # If units were supposed to be filtered but none matched, we're done
            if len(associate_us) == 0:
                # Return an empty list to indicate nothing was copied
                return {'units_successful': []}

        # Now we can make sure the destination repository's importer is capable
        # of importing either the selected units or all of the units
        associated_unit_type_ids = calculate_associated_type_ids(source_repo_id, associate_us)
        unsupported_types = [t for t in associated_unit_type_ids if t not in supported_type_ids]

        if len(unsupported_types) > 0:
            raise exceptions.InvalidValue(['types'])

        # Convert all of the units into the plugin standard representation if
        # a filter was specified
        transfer_units = None
        if associate_us is not None:
            transfer_units = create_transfer_units(associate_us, associated_unit_type_ids)

        # Convert the two repos into the plugin API model
        transfer_dest_repo = common_utils.to_transfer_repo(dest_repo)

        transfer_source_repo = common_utils.to_transfer_repo(source_repo)

        # Invoke the importer
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            dest_repo_importer['importer_type_id'])

        call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'],
                                              import_config_override)
        conduit = ImportUnitConduit(
            source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id'])

        try:
            copied_units = importer_instance.import_units(
                transfer_source_repo, transfer_dest_repo, conduit, call_config,
                units=transfer_units)
            unit_ids = [u.to_id_dict() for u in copied_units]
            return {'units_successful': unit_ids}

        except Exception:
            msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]')
            msg = msg % {'i': dest_repo_importer['importer_type_id'], 'r': dest_repo_id}
            logger.exception(msg)
            raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
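A usage sketch based on the signature and return value above; the repo ids are made up, the criteria is built the same way the web handlers shown earlier do, and whether this is called directly or dispatched as a task depends on the caller.

# Hypothetical call: copy only rpm units named 'bash' between two repos.
criteria = UnitAssociationCriteria.from_client_input(
    {'type_ids': ['rpm'], 'filters': {'unit': {'name': 'bash'}}})
report = associate_from_repo('upstream-repo', 'downstream-repo', criteria=criteria)
# Each entry in report['units_successful'] is the id dict of a copied unit,
# produced by Unit.to_id_dict() in the return path above.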
Beispiel #58
0
    def update_importer_config(self, repo_id, importer_config):
        """
        Attempts to update the saved configuration for the given repo's importer.
        The importer will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param importer_config: new configuration values to use for this repo
        @type  importer_config: dict

        @raise MissingResource: if the given repo does not exist
        @raise MissingResource: if the given repo does not have an importer
        @raise InvalidConfiguration: if the plugin indicates the given
                configuration is invalid
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_importer = importer_coll.find_one({'repo_id': repo_id})
        if repo_importer is None:
            raise MissingResource(repo_id)

        importer_type_id = repo_importer['importer_type_id']
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_importer['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [
            k for k in importer_config if importer_config[k] is None
        ]
        for key in unset_property_names:
            merged_config.pop(key, None)
            importer_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(importer_config)

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            importer_type_id, repo_id)

        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_importer_type(importer_type_id)

        transfer_related_repos = []
        for r in related_repos:

            # Don't include the repo being updated in this list
            if r['id'] == repo_id:
                continue

            all_configs = [d['config'] for d in r['importers']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = importer_instance.validate_config(transfer_repo,
                                                       call_config,
                                                       transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception(
                'Exception received from importer [%s] while validating config for repo [%s]'
                % (importer_type_id, repo_id))
            raise PulpDataException(e.args), None, sys.exc_info()[2]
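The delta-merge convention here (a value of None unsets a key, everything else overrides) is worth seeing in isolation; a standalone sketch with made-up config values:

# Illustrative data only -- mirrors the merge logic in update_importer_config.
existing_config = {'feed': 'http://cdn.example.com/repo', 'max_speed': 1000}
importer_config = {'max_speed': None, 'validate': True}    # delta supplied by the caller

merged_config = dict(existing_config)
for key in [k for k in importer_config if importer_config[k] is None]:
    merged_config.pop(key, None)    # None means "revert to the default"
    importer_config.pop(key, None)
merged_config.update(importer_config)
# merged_config == {'feed': 'http://cdn.example.com/repo', 'validate': True}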
Beispiel #59
0
    def set_importer(self, repo_id, importer_type_id, repo_plugin_config):
        """
        Configures an importer to be used for the given repository.

        Keep in mind this method is written assuming a single importer per repo.
        The domain model technically supports multiple importers, but this
        call is what enforces the single importer behavior.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param importer_type_id: identifies the type of importer being added;
                                 must correspond to an importer loaded at server startup
        @type  importer_type_id: str

        @param repo_plugin_config: configuration values for the importer; may be None
        @type  repo_plugin_config: dict

        @raise MissingResource: if repo_id does not represent a valid repo
        @raise InvalidImporterConfiguration: if the importer cannot be
               initialized for the given repo
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        if not plugin_api.is_valid_importer(importer_type_id):
            raise InvalidValue(['importer_type_id'])

        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            importer_type_id, repo_id)

        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_importer_type(importer_type_id)

        transfer_related_repos = []
        for r in related_repos:
            all_configs = [d['config'] for d in r['importers']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = importer_instance.validate_config(transfer_repo,
                                                       call_config,
                                                       transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result

        except Exception, e:
            _LOG.exception(
                'Exception received from importer [%s] while validating config'
                % importer_type_id)
            raise PulpDataException(e.args), None, sys.exc_info()[2]