Example #1
    def process_main(self, item=None):
        repo = self.get_repo()

        units_coll = RepoContentUnit.get_collection()
        units = self._get_units(units_coll, repo.id)

        snapshot_name = repo.notes.get(REPO_SNAPSHOT_NAME)
        if snapshot_name:
            old_units = self._get_units(units_coll, snapshot_name)
        else:
            old_units = []
        units = self._units_to_set(units)
        old_units = self._units_to_set(old_units)
        # Create a snapshot if one did not exist before (snapshot_name is
        # None) and the repo is not empty, or if the unit contents are
        # different
        if units == old_units and (snapshot_name or not units):
            return self._build_report(snapshot_name)

        now = time.time()
        suffix = time.strftime("%Y%m%d%H%M%S", time.gmtime(now))
        suffix = "__%s.%04dZ" % (suffix, 10000 * (now - int(now)))
        new_name = "%s%s" % (repo.id, suffix)
        notes = {}
        notes[REPO_SNAPSHOT_TIMESTAMP] = now
        if '_repo-type' in repo.notes:
            notes['_repo-type'] = repo.notes['_repo-type']
        notes[REPO_SNAPSHOT_NAME] = new_name
        distributors = []
        # Fetch the repo's existing importers

        repo_importer = RepoImporter.objects.filter(repo_id=repo.id).first()
        if repo_importer is not None:
            importer_type_id = repo_importer['importer_type_id']
        else:
            importer_type_id = None

        repo_obj = repo_controller.create_repo(
            new_name,
            notes=notes,
            importer_type_id=importer_type_id,
            importer_repo_plugin_config={},
            distributor_list=distributors)
        copied = []
        for unit in sorted(units):
            copied.append(
                RepoContentUnit(
                    repo_id=new_name,
                    unit_id=unit.unit_id,
                    unit_type_id=unit.unit_type_id,
                ))
        if copied:
            units_coll.insert(copied)
        repo_controller.rebuild_content_unit_counts(repo_obj)

        group_coll = RepoGroup.get_collection()
        group_coll.update(dict(repo_ids=repo.id),
                          {'$addToSet': dict(repo_ids=new_name)})
        return self._build_report(new_name)
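
A side note on the snapshot name built above: the suffix is plain standard-library time formatting, the UTC second plus four digits of the fractional second. A standalone run of just that logic (the 'zoo' repo id is made up):

import time

# Reproduce the suffix construction from the snapshot task above.
now = time.time()
suffix = time.strftime("%Y%m%d%H%M%S", time.gmtime(now))
suffix = "__%s.%04dZ" % (suffix, 10000 * (now - int(now)))
print("zoo" + suffix)  # e.g. zoo__20150421153045.0427Z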
Example #2
    def unassociate_by_criteria(repo_id, criteria, notify_plugins=True):
        """
        Unassociate units that are matched by the given criteria.

        :param repo_id:        identifies the repo
        :type  repo_id:        str
        :param criteria:       dict representation of a UnitAssociationCriteria
                               selecting the units to unassociate
        :type  criteria:       dict
        :param notify_plugins: if true, relevant plugins will be informed of the removal
        :type  notify_plugins: bool
        """
        criteria = UnitAssociationCriteria.from_dict(criteria)
        repo = model.Repository.objects.get_repo_or_missing_resource(repo_id)

        unassociate_units = load_associated_units(repo_id, criteria)

        if len(unassociate_units) == 0:
            return {}

        # Convert the units into transfer units. This happens regardless of whether or not
        # the plugin will be notified as it's used to generate the return result.
        # If all source types have been converted to mongo, search via new style.
        repo_unit_types = set(repo.content_unit_counts.keys())
        if repo_unit_types.issubset(set(plugin_api.list_unit_models())):
            transfer_units = list(
                RepoUnitAssociationManager._units_from_criteria(
                    repo, criteria))
        else:
            transfer_units = None
            if unassociate_units is not None:
                transfer_units = list(create_transfer_units(unassociate_units))

        if notify_plugins:
            remove_from_importer(repo_id, transfer_units)

        unit_map = {}  # maps unit_type_id to a list of unit_ids

        for unit in unassociate_units:
            id_list = unit_map.setdefault(unit['unit_type_id'], [])
            id_list.append(unit['unit_id'])

        collection = RepoContentUnit.get_collection()

        for unit_type_id, unit_ids in unit_map.items():
            spec = {
                'repo_id': repo_id,
                'unit_type_id': unit_type_id,
                'unit_id': {
                    '$in': unit_ids
                }
            }
            collection.remove(spec)

        repo_controller.update_last_unit_removed(repo_id)
        repo_controller.rebuild_content_unit_counts(repo)

        # Match the return type/format as copy
        serializable_units = [u.to_id_dict() for u in transfer_units]

        return {'units_successful': serializable_units}
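
The unit_map built above groups the doomed unit ids by type so that each type can be removed with a single '$in' query. A minimal standalone illustration (the type ids, unit ids, and repo id are made up):

# Group unit ids by unit_type_id, as unassociate_by_criteria does above.
unassociate_units = [
    {'unit_type_id': 'rpm', 'unit_id': 'aaa'},
    {'unit_type_id': 'rpm', 'unit_id': 'bbb'},
    {'unit_type_id': 'erratum', 'unit_id': 'ccc'},
]

unit_map = {}  # maps unit_type_id to a list of unit_ids
for unit in unassociate_units:
    unit_map.setdefault(unit['unit_type_id'], []).append(unit['unit_id'])

print(unit_map)  # {'rpm': ['aaa', 'bbb'], 'erratum': ['ccc']}

# Each entry then becomes one mongo removal spec, e.g. for 'rpm':
# {'repo_id': 'zoo', 'unit_type_id': 'rpm', 'unit_id': {'$in': ['aaa', 'bbb']}}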
Example #3
def _handle_group_category_comps(repo, type_id, unit_key, metadata, file_path, conduit, config):
    """
    Handles the creation of a package group or category.

    If a file was uploaded, treat this as upload of a comps.xml file. If no file was uploaded,
    the process only creates the unit.

    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository

    :param type_id: The type_id of the package being uploaded
    :type  type_id: str

    :param unit_key: A dictionary of fields to overwrite introspected field values
    :type  unit_key: dict

    :param metadata: A dictionary of fields to overwrite introspected field values, or None
    :type  metadata: dict or None

    :param file_path: The path to the uploaded package
    :type  file_path: str

    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit

    :param config: plugin configuration for the repository
    :type  config: pulp.plugins.config.PluginCallConfiguration
    """
    model_class = plugin_api.get_unit_model_by_id(type_id)
    update_fields_inbound(model_class, unit_key or {})
    update_fields_inbound(model_class, metadata or {})

    if file_path is not None and os.path.getsize(file_path) > 0:
        # uploading a comps.xml
        repo_id = repo.repo_id
        _get_and_save_file_units(file_path, group.process_group_element,
                                 group.GROUP_TAG, conduit, repo_id)
        _get_and_save_file_units(file_path, group.process_category_element,
                                 group.CATEGORY_TAG, conduit, repo_id)
        _get_and_save_file_units(file_path, group.process_environment_element,
                                 group.ENVIRONMENT_TAG, conduit, repo_id)
    else:
        # uploading a package group or package category
        unit_data = {}
        unit_data.update(metadata or {})
        unit_data.update(unit_key or {})
        try:
            unit = model_class(**unit_data)
        except TypeError:
            raise ModelInstantiationError()

        unit.set_content(file_path)
        unit.save()

        repo_controller.associate_single_unit(repo, unit)
        repo_controller.rebuild_content_unit_counts(repo)
Example #4
File: sync.py Project: Poil/pulp_python
    def sync(self):
        """
        Perform the repository synchronization.

        :return: The final sync report.
        :rtype:  pulp.plugins.model.SyncReport
        """
        self.process_lifecycle()
        repo_controller.rebuild_content_unit_counts(self.get_repo().repo_obj)
        return self._build_final_report()
Example #5
    def sync(self):
        """
        Perform the repository synchronization.

        :return: The final sync report.
        :rtype:  pulp.plugins.model.SyncReport
        """
        self.process_lifecycle()
        repo_controller.rebuild_content_unit_counts(self.get_repo().repo_obj)
        return self._build_final_report()
Example #6
def migrate(*args, **kwargs):
    """
    For each puppet module, check and, if needed, update the module name format.

    There was a discrepancy in the way a puppet module's name was stored in pulp,
    depending on whether it was synced from the filesystem or uploaded. This
    migration finds puppet module units whose name is in the wrong format and
    replaces it with the correctly formatted name.
    """

    modules = Module.objects.filter(
        Q(name__contains='/') | Q(name__contains='-'))
    repos_to_rebuild = set()
    for puppet_unit in modules:
        try:
            author, name = puppet_unit['name'].split('-', 1)
        except ValueError:
            # This is the forge format, but Puppet still allows it
            author, name = puppet_unit['name'].split('/', 1)
        try:
            puppet_unit.name = name
            puppet_unit.save()
        except NotUniqueError:
            # find all repos that have this unit
            repos_with_unit = model.RepositoryContentUnit.objects.filter(
                unit_id=puppet_unit.id)
            repos_to_rebuild.update(repos_with_unit)
            # find unit with correct name
            correct_unit = Module.objects.filter(name=name).first()
            for repo in repos_with_unit:
                # unassociate wrong unit
                repo_controller.disassociate_units(repo, [puppet_unit])
                # associate correct unit to the list of the repos
                repo_controller.associate_single_unit(repo, correct_unit)

    repo_list = []
    for repo in repos_to_rebuild:
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(
            repo.repo_id)
        repo_controller.rebuild_content_unit_counts(repo_obj)
        repo_list.append(repo.repo_id)

    repos_to_republish = model.Distributor.objects.filter(
        repo_id__in=repo_list, last_publish__ne=None)
    # redirect output to file
    path = os.path.join('/var/lib/pulp', '0005_puppet_module_name_change.txt')
    f = open(path, 'w')
    f.write(str([repo.repo_id for repo in repos_to_republish]))
    f.close()
    msg = _(
        '***Note. You may want to re-publish the list of repos found in %s.\n'
        '   This migration fixed an issue with modules installation related to wrong '
        'puppet_module name.' % f.name)
    _log.info(msg)
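
The split logic in the migration above normalizes both module name formats. A standalone demonstration with made-up module names:

# 'author-name' splits on the first dash; the 'author/name' forge format
# raises ValueError on that unpacking and falls through to the slash split.
for full_name in ('puppetlabs-stdlib', 'puppetlabs/stdlib'):
    try:
        author, name = full_name.split('-', 1)
    except ValueError:
        author, name = full_name.split('/', 1)
    print(author + ' ' + name)  # puppetlabs stdlib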
Example #7
    def _do_import_modules(self, metadata):
        """
        Actual logic of the import. This method will do a best effort per module;
        if an individual module fails it will be recorded and the import will
        continue. This method will only raise an exception in an extreme case
        where it cannot react and continue.
        """
        downloader = self._create_downloader()
        self.downloader = downloader

        # Ease module lookup
        metadata_modules_by_key = dict([(m.unit_key_str, m) for m in metadata.modules])

        # Collect information about the repository's modules before changing it
        existing_module_ids_by_key = {}
        for module in Module.objects.only(*Module.unit_key_fields).all():
            existing_module_ids_by_key[module.unit_key_str] = module.id

        new_unit_keys = self._resolve_new_units(existing_module_ids_by_key.keys(),
                                                metadata_modules_by_key.keys())

        # Once we know how many things need to be processed, we can update the progress report
        self.progress_report.modules_total_count = len(new_unit_keys)
        self.progress_report.modules_finished_count = 0
        self.progress_report.modules_error_count = 0
        self.progress_report.update_progress()

        # Add new units
        for key in new_unit_keys:
            if self._canceled:
                break
            module = metadata_modules_by_key[key]
            try:
                self._add_new_module(downloader, module)
                self.progress_report.modules_finished_count += 1
            except Exception as e:
                self.progress_report.add_failed_module(module, e, sys.exc_info()[2])

            self.progress_report.update_progress()

        # Remove missing units if the configuration indicates to do so
        if self._should_remove_missing():
            remove_unit_keys = self._resolve_remove_units(existing_module_ids_by_key.keys(),
                                                          metadata_modules_by_key.keys())
            doomed_ids = [existing_module_ids_by_key[key] for key in remove_unit_keys]
            doomed_module_iterator = Module.objects.in_bulk(doomed_ids).itervalues()
            repo_controller.disassociate_units(self.repo, doomed_module_iterator)

        repo_controller.rebuild_content_unit_counts(self.repo.repo_obj)
        self.downloader = None
Example #8
def migrate(*args, **kwargs):
    """
    For each puppet module, check and, if needed, update the module name format.

    There was a discrepancy in the way a puppet module's name was stored in pulp,
    depending on whether it was synced from the filesystem or uploaded. This
    migration finds puppet module units whose name is in the wrong format and
    replaces it with the correctly formatted name.
    """

    modules = Module.objects.filter(Q(name__contains="/") | Q(name__contains="-"))
    repos_to_rebuild = set()
    for puppet_unit in modules:
        try:
            author, name = puppet_unit["name"].split("-", 1)
        except ValueError:
            # This is the forge format, but Puppet still allows it
            author, name = puppet_unit["name"].split("/", 1)
        try:
            puppet_unit.name = name
            puppet_unit.save()
        except NotUniqueError:
            # find all repos that have this unit
            repos_with_unit = model.RepositoryContentUnit.objects.filter(unit_id=puppet_unit.id)
            repos_to_rebuild.update(repos_with_unit)
            # find unit with correct name
            correct_unit = Module.objects.filter(name=name).first()
            for repo in repos_with_unit:
                # unassociate wrong unit
                repo_controller.disassociate_units(repo, [puppet_unit])
                # associate correct unit to the list of the repos
                repo_controller.associate_single_unit(repo, correct_unit)

    repo_list = []
    for repo in repos_to_rebuild:
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo.repo_id)
        repo_controller.rebuild_content_unit_counts(repo_obj)
        repo_list.append(repo.repo_id)

    repos_to_republish = model.Distributor.objects.filter(repo_id__in=repo_list, last_publish__ne=None)
    # redirect output to file
    path = os.path.join("/var/lib/pulp", "0005_puppet_module_name_change.txt")
    f = open(path, "w")
    f.write(str([repo.repo_id for repo in repos_to_republish]))
    f.close()
    msg = _(
        "***Note. You may want to re-publish the list of repos found in %s.\n"
        "   This migration fixed an issue with modules installation related to wrong "
        "puppet_module name." % f.name
    )
    _log.info(msg)
Example #9
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules in the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        existing_module_ids_by_key = {}
        for module in Module.objects.only(*Module.unit_key_fields).all():
            existing_module_ids_by_key[module.unit_key_str] = module.id

        remote_unit_keys = []

        for module_path in module_paths:
            if self.canceled:
                return
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_metadata(puppet_manifest)
            remote_unit_keys.append(module.unit_key_str)

            # Even though we've already basically processed this unit, not doing this makes the
            # progress reporting confusing because it shows Pulp always importing all the modules.
            if module.unit_key_str in existing_module_ids_by_key:
                self.report.modules_total_count -= 1
                continue
            _logger.debug(IMPORT_MODULE, dict(mod=module_path))

            module.set_content(module_path)
            module.save()

            repo_controller.associate_single_unit(self.repo.repo_obj, module)

            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(existing_module_ids_by_key, remote_unit_keys)
        repo_controller.rebuild_content_unit_counts(self.repo.repo_obj)
Example #10
def handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path, conduit):
    """
    Handles an upload unit request to the importer. This call is responsible
    for moving the unit from its temporary location where Pulp stored the
    upload to the final storage location (as dictated by Pulp) for the unit.
    This call will also update the database in Pulp to reflect the unit
    and its association to the repository.

    :param repo: repository into which the unit is being uploaded
    :type repo: pulp.plugins.model.Repository
    :param type_id: type of unit being uploaded
    :type type_id: str
    :param unit_key: unique identifier for the unit
    :type unit_key: dict
    :param metadata: extra data about the unit
    :type metadata: dict
    :param file_path: temporary location of the uploaded file
    :type file_path: str
    :param conduit: for calls back into Pulp
    :type conduit: pulp.plugins.conduit.upload.UploadConduit
    """
    if type_id != constants.TYPE_PUPPET_MODULE:
        raise NotImplementedError()

    # Extract the metadata from the module
    extracted_data = metadata_parser.extract_metadata(file_path, repo.working_dir)

    # rename the file so it has the original module name
    original_filename = extracted_data['name'] + '-' + extracted_data['version'] + '.tar.gz'
    new_file_path = os.path.join(os.path.dirname(file_path), original_filename)
    shutil.move(file_path, new_file_path)

    # Overwrite the author and name
    extracted_data.update(Module.split_filename(extracted_data['name']))

    uploaded_module = Module.from_metadata(extracted_data)
    uploaded_module.set_content(new_file_path)
    uploaded_module.save()

    repo_controller.associate_single_unit(repo.repo_obj, uploaded_module)
    repo_controller.rebuild_content_unit_counts(repo.repo_obj)

    return {'success_flag': True, 'summary': '', 'details': {}}
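
The rename above simply rebuilds the final filename from the extracted metadata, next to the temporary upload location. A standalone illustration of the path handling (the path and metadata values are made up):

import os

file_path = '/var/lib/pulp/uploads/upload_1234'  # hypothetical temp upload
extracted_data = {'name': 'stdlib', 'version': '4.2.0'}

original_filename = extracted_data['name'] + '-' + extracted_data['version'] + '.tar.gz'
new_file_path = os.path.join(os.path.dirname(file_path), original_filename)
print(new_file_path)  # /var/lib/pulp/uploads/stdlib-4.2.0.tar.gz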
Example #11
    def test_calculate_counts(self, mock_get_db, mock_repo_objects):
        """
        Test the calculation of the unit counts.
        This is only a marginally useful test as 90% of the work is done by the
        mongo server
        """
        mock_get_db.return_value.command.return_value = \
            {'result': [{'_id': 'type_1', 'sum': 5}, {'_id': 'type_2', 'sum': 3}]}
        repo = MagicMock(repo_id='foo')
        repo_controller.rebuild_content_unit_counts(repo)

        expected_pipeline = [
            {'$match': {'repo_id': 'foo'}},
            {'$group': {'_id': '$unit_type_id', 'sum': {'$sum': 1}}}]

        mock_get_db.return_value.command.assert_called_once_with(
            'aggregate', 'repo_content_units', pipeline=expected_pipeline
        )

        mock_repo_objects.assert_called_once_with(__raw__={'id': 'foo'})
        mock_repo_objects.return_value.update_one.assert_called_once_with(
            set__content_unit_counts={'type_1': 5, 'type_2': 3}
        )
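
The test above is the one place on this page that pins down what rebuild_content_unit_counts itself does: one aggregation over repo_content_units, then one update of the repository document. Below is a minimal sketch consistent with those assertions; the import paths and helper names are assumptions inferred from the mocks, not the actual Pulp source:

# Assumed imports (these are what the test's mocks appear to stand in for):
from pulp.server.db import connection
from pulp.server.db import model


def rebuild_content_unit_counts(repo):
    # Count associations per unit type with the exact pipeline the test expects.
    db = connection.get_database()
    pipeline = [
        {'$match': {'repo_id': repo.repo_id}},
        {'$group': {'_id': '$unit_type_id', 'sum': {'$sum': 1}}},
    ]
    result = db.command('aggregate', 'repo_content_units', pipeline=pipeline)
    counts = dict((row['_id'], row['sum']) for row in result['result'])

    # Persist the recomputed counts on the repository document, matching the
    # update_one(set__content_unit_counts=...) call the test asserts.
    model.Repository.objects(__raw__={'id': repo.repo_id}).update_one(
        set__content_unit_counts=counts)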
Example #12
    def finalize(self):
        repo_controller.rebuild_content_unit_counts(self.get_repo())
Example #13
File: upload.py Project: alanoe/pulp
    def import_uploaded_unit(repo_id, unit_type_id, unit_key, unit_metadata, upload_id,
                             override_config=None):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        :param repo_id:       identifies the repository into which the unit is uploaded
        :type  repo_id:       str
        :param unit_type_id:  type of unit being uploaded
        :type  unit_type_id:  str
        :param unit_key:      unique identifier for the unit (user-specified)
        :type  unit_key:      dict
        :param unit_metadata: any user-specified information about the unit
        :type  unit_metadata: dict
        :param upload_id:     upload being imported
        :type  upload_id:     str
        :return:              A dictionary describing the success or failure of the upload. It must
                              contain the following keys:
                                'success_flag': bool. Indicates whether the upload was successful
                                'summary':      json-serializable object, providing summary
                                'details':      json-serializable object, providing details
        :rtype:               dict
        :raises MissingResource: if the upload request was for a non-existent repository
        :raises PulpCodedException: if import was unsuccessful and it was handled by the importer
        :raises PulpException: if import was unsuccessful and it was not handled by the importer
        :raises PulpExecutionException: if an unexpected error occurred during the upload
        """
        # If it doesn't raise an exception, it's good to go
        ContentUploadManager.is_valid_upload(repo_id, unit_type_id)
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
        repo_importer = model.Importer.objects.get_or_404(repo_id=repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'])

        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'],
                                              override_config)
        transfer_repo = repo_obj.to_transfer_repo()

        file_path = ContentUploadManager._upload_file_path(upload_id)

        # Invoke the importer
        try:
            result = importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key,
                                                   unit_metadata, file_path, conduit, call_config)
            if not result['success_flag']:
                raise PulpCodedException(
                    error_code=error_codes.PLP0047, repo_id=transfer_repo.id,
                    importer_id=repo_importer['importer_type_id'],
                    unit_type=unit_type_id, summary=result['summary'], details=result['details']
                )

            repo_controller.rebuild_content_unit_counts(repo_obj)
            return result

        except PulpException:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise
        except Exception, e:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise PulpExecutionException(e), None, sys.exc_info()[2]
Example #14
    def finalize(self):
        repo_controller.rebuild_content_unit_counts(self.get_repo())
Example #15
File: sync.py Project: dkliban/pulp_rpm
            except Exception, e:
                # In case other exceptions were caught that are not related to the state of the
                # mirror, raise the exception immediately and do not iterate through the rest
                # of the mirrors.
                _logger.exception(e)
                self._set_failed_state(e)
                report = self.sync_conduit.build_failure_report(self._progress_summary,
                                                                self.progress_status)
                return report

            finally:
                # clean up whatever we may have left behind
                shutil.rmtree(self.tmp_dir, ignore_errors=True)
                # recalculate all the unit counts
                repo_controller.rebuild_content_unit_counts(self.repo)

            self.save_repomd_revision()
            _logger.info(_('Sync complete.'))
            return self.sync_conduit.build_success_report(self._progress_summary,
                                                          self.progress_status)

    def _set_failed_state(self, exception):
        """
        Sets failed state of the task and caught error in the progress status.

        :param exception: the caught exception
        :type  exception: Exception
        """
        for step, value in self.progress_status.iteritems():
            if value.get('state') == constants.STATE_RUNNING:
Example #16
    def import_uploaded_unit(repo_id,
                             unit_type_id,
                             unit_key,
                             unit_metadata,
                             upload_id,
                             override_config=None):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        :param repo_id:       identifies the repository into which the unit is uploaded
        :type  repo_id:       str
        :param unit_type_id:  type of unit being uploaded
        :type  unit_type_id:  str
        :param unit_key:      unique identifier for the unit (user-specified)
        :type  unit_key:      dict
        :param unit_metadata: any user-specified information about the unit
        :type  unit_metadata: dict
        :param upload_id:     upload being imported
        :type  upload_id:     str
        :return:              A dictionary describing the success or failure of the upload. It must
                              contain the following keys:
                                'success_flag': bool. Indicates whether the upload was successful
                                'summary':      json-serializable object, providing summary
                                'details':      json-serializable object, providing details
        :rtype:               dict
        :raises MissingResource: if the upload request was for a non-existent repository
        :raises PulpCodedException: if import was unsuccessful and it was handled by the importer
        :raises PulpException: if import was unsuccessful and it was not handled by the importer
        :raises PulpExecutionException: if an unexpected error occurred during the upload
        """
        # If it doesn't raise an exception, it's good to go
        ContentUploadManager.is_valid_upload(repo_id, unit_type_id)
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(
            repo_id)
        repo_importer = model.Importer.objects.get_or_404(repo_id=repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'])

        call_config = PluginCallConfiguration(plugin_config,
                                              repo_importer['config'],
                                              override_config)
        transfer_repo = repo_obj.to_transfer_repo()

        file_path = ContentUploadManager._upload_file_path(upload_id)

        # Invoke the importer
        try:
            result = importer_instance.upload_unit(transfer_repo, unit_type_id,
                                                   unit_key, unit_metadata,
                                                   file_path, conduit,
                                                   call_config)
            if not result['success_flag']:
                raise PulpCodedException(
                    error_code=error_codes.PLP0047,
                    repo_id=transfer_repo.id,
                    importer_id=repo_importer['importer_type_id'],
                    unit_type=unit_type_id,
                    summary=result['summary'],
                    details=result['details'])

            repo_controller.rebuild_content_unit_counts(repo_obj)
            return result

        except PulpException:
            msg = _(
                'Error from the importer while importing uploaded unit to repository [%(r)s]'
            )
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise
        except Exception, e:
            msg = _(
                'Error from the importer while importing uploaded unit to repository [%(r)s]'
            )
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise PulpExecutionException(e), None, sys.exc_info()[2]
Example #17
    def associate_from_repo(source_repo_id, dest_repo_id, criteria,
                            import_config_override=None):
        """
        Creates associations in a repository based on the contents of a source
        repository. Units from the source repository can be filtered by
        specifying a criteria object.

        The destination repository must have an importer that can support
        the types of units being associated. This is done by analyzing the
        unit list and the importer metadata and takes place before the
        destination repository is called.

        Pulp does not actually perform the associations as part of this call.
        The unit list is determined and passed to the destination repository's
        importer. It is the job of the importer to make the associate calls
        back into Pulp where applicable.

        If criteria is None, the effect of this call is to copy the source
        repository's associations into the destination repository.

        :param source_repo_id:         identifies the source repository
        :type  source_repo_id:         str
        :param dest_repo_id:           identifies the destination repository
        :type  dest_repo_id:           str
        :param criteria:               optional; if specified, will filter the units retrieved from
                                       the source repository
        :type  criteria:               pulp.server.db.model.criteria.UnitAssociationCriteria
        :param import_config_override: optional config containing values to use for this import only
        :type  import_config_override: dict
        :return:                       dict with key 'units_successful' whose
                                       value is a list of unit keys for the units
                                       that were associated by this operation
        :rtype:                        dict
        :raise MissingResource:        if either of the specified repositories doesn't exist
        """
        criteria = UnitAssociationCriteria.from_dict(criteria)
        source_repo = model.Repository.objects.get_repo_or_missing_resource(source_repo_id)
        dest_repo = model.Repository.objects.get_repo_or_missing_resource(dest_repo_id)

        dest_repo_importer = model.Importer.objects.get_or_404(repo_id=dest_repo_id)
        source_repo_importer = model.Importer.objects.get_or_404(repo_id=source_repo_id)

        # The docs are incorrect on the list_importer_types call; it actually
        # returns a dict with the types under key "types" for some reason.
        supported_type_ids = set(plugin_api.list_importer_types(
            dest_repo_importer.importer_type_id)['types'])

        # Get the unit types from the source repo
        source_repo_unit_types = set(source_repo.content_unit_counts.keys())

        # Now we can make sure the destination repository's importer is capable
        # of importing either the selected units or all of the units
        if not source_repo_unit_types.issubset(supported_type_ids):
            raise exceptions.PulpCodedException(error_code=error_codes.PLP0044)
        transfer_units = None
        # if all source types have been converted to mongo - search via new style
        if source_repo_unit_types.issubset(set(plugin_api.list_unit_models())):
            transfer_units = RepoUnitAssociationManager._units_from_criteria(source_repo, criteria)
        else:
            # else, search via old style
            associate_us = load_associated_units(source_repo_id, criteria)
            # If units were supposed to be filtered but none matched, we're done
            if len(associate_us) == 0:
                # Return an empty list to indicate nothing was copied
                return {'units_successful': []}
            # Convert all of the units into the plugin standard representation if
            # a filter was specified
            transfer_units = None
            if associate_us is not None:
                transfer_units = create_transfer_units(associate_us)

        # Convert the two repos into the plugin API model
        transfer_dest_repo = dest_repo.to_transfer_repo()
        transfer_source_repo = source_repo.to_transfer_repo()

        # Invoke the importer
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            dest_repo_importer.importer_type_id)

        call_config = PluginCallConfiguration(plugin_config, dest_repo_importer.config,
                                              import_config_override)
        conduit = ImportUnitConduit(
            source_repo_id, dest_repo_id, source_repo_importer.importer_type_id,
            dest_repo_importer.importer_type_id)

        try:
            copied_units = importer_instance.import_units(
                transfer_source_repo, transfer_dest_repo, conduit, call_config,
                units=transfer_units)
            if isinstance(copied_units, tuple):
                suc_units_ids = [u.to_id_dict() for u in copied_units[0] if u is not None]
                unsuc_units_ids = [u.to_id_dict() for u in copied_units[1]]
                repo_controller.rebuild_content_unit_counts(dest_repo)
                return {'units_successful': suc_units_ids,
                        'units_failed_signature_filter': unsuc_units_ids}
            unit_ids = [u.to_id_dict() for u in copied_units if u is not None]
            repo_controller.rebuild_content_unit_counts(dest_repo)
            return {'units_successful': unit_ids}
        except Exception as e:
            msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]')
            msg_dict = {'i': dest_repo_importer.importer_type_id, 'r': dest_repo_id}
            logger.exception(msg % msg_dict)
            raise e, None, sys.exc_info()[2]
Example #18
def _handle_package(repo, type_id, unit_key, metadata, file_path, conduit, config):
    """
    Handles the upload for an RPM or SRPM.

    This inspects the package contents to determine field values. The unit_key
    and metadata fields overwrite field values determined through package inspection.

    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository

    :param type_id: The type_id of the package being uploaded
    :type  type_id: str

    :param unit_key: A dictionary of fields to overwrite introspected field values
    :type  unit_key: dict

    :param metadata: A dictionary of fields to overwrite introspected field values, or None
    :type  metadata: dict or None

    :param file_path: The path to the uploaded package
    :type  file_path: str

    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit

    :param config: plugin configuration for the repository
    :type  config: pulp.plugins.config.PluginCallConfiguration

    :raises PulpCodedException PLP1005: if the checksum type from the user is not recognized
    :raises PulpCodedException PLP1013: if the checksum value from the user does not validate
    """
    try:
        rpm_data = _extract_rpm_data(type_id, file_path)
    except:
        _LOGGER.exception('Error extracting RPM metadata for [%s]' % file_path)
        raise

    model_class = plugin_api.get_unit_model_by_id(type_id)
    update_fields_inbound(model_class, unit_key or {})
    update_fields_inbound(model_class, metadata or {})

    # set checksum and checksumtype
    if metadata:
        checksumtype = metadata.pop('checksumtype', verification.TYPE_SHA256)
        rpm_data['checksumtype'] = verification.sanitize_checksum_type(checksumtype)
        if 'checksum' in metadata:
            rpm_data['checksum'] = metadata.pop('checksum')
            try:
                with open(file_path) as dest_file:
                    verification.verify_checksum(dest_file, rpm_data['checksumtype'],
                                                 rpm_data['checksum'])
            except verification.VerificationException:
                raise PulpCodedException(error_code=platform_errors.PLP1013)
        else:
            rpm_data['checksum'] = _calculate_checksum(rpm_data['checksumtype'], file_path)
    else:
        rpm_data['checksumtype'] = verification.TYPE_SHA256
        rpm_data['checksum'] = _calculate_checksum(rpm_data['checksumtype'], file_path)

    # Update the RPM-extracted data with anything additional the user specified.
    # Allow the user-specified values to override the extracted ones.
    rpm_data.update(metadata or {})
    rpm_data.update(unit_key or {})

    # Validate the user specified data by instantiating the model
    try:
        unit = model_class(**rpm_data)
    except TypeError:
        raise ModelInstantiationError()

    # Extract the repodata snippets
    unit.repodata = rpm_parse.get_package_xml(file_path, sumtype=unit.checksumtype)
    _update_provides_requires(unit)

    # check if the unit has duplicate nevra
    purge.remove_unit_duplicate_nevra(unit, repo)

    unit.set_content(file_path)
    unit.save()

    repo_controller.associate_single_unit(repo, unit)
    repo_controller.rebuild_content_unit_counts(repo)
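
_calculate_checksum is not shown on this page; a plausible stand-in using only the standard library is sketched below. This is a guess at its behaviour (stream the file through the named hash algorithm), not Pulp's implementation:

import hashlib

def calculate_checksum(checksumtype, file_path, chunk_size=8 * 1024 * 1024):
    # checksumtype is a hashlib algorithm name such as 'sha256'.
    hasher = hashlib.new(checksumtype)
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest()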
Example #19
    def associate_from_repo(cls, source_repo_id, dest_repo_id, criteria,
                            import_config_override=None):
        """
        Creates associations in a repository based on the contents of a source
        repository. Units from the source repository can be filtered by
        specifying a criteria object.

        The destination repository must have an importer that can support
        the types of units being associated. This is done by analyzing the
        unit list and the importer metadata and takes place before the
        destination repository is called.

        Pulp does not actually perform the associations as part of this call.
        The unit list is determined and passed to the destination repository's
        importer. It is the job of the importer to make the associate calls
        back into Pulp where applicable.

        If criteria is None, the effect of this call is to copy the source
        repository's associations into the destination repository.

        :param source_repo_id:         identifies the source repository
        :type  source_repo_id:         str
        :param dest_repo_id:           identifies the destination repository
        :type  dest_repo_id:           str
        :param criteria:               optional; if specified, will filter the units retrieved from
                                       the source repository
        :type  criteria:               pulp.server.db.model.criteria.UnitAssociationCriteria
        :param import_config_override: optional config containing values to use for this import only
        :type  import_config_override: dict
        :return:                       dict with key 'units_successful' whose
                                       value is a list of unit keys for the units
                                       that were associated by this operation
        :rtype:                        dict
        :raise MissingResource:        if either of the specified repositories doesn't exist
        """
        criteria = UnitAssociationCriteria.from_dict(criteria)
        source_repo = model.Repository.objects.get_repo_or_missing_resource(source_repo_id)
        dest_repo = model.Repository.objects.get_repo_or_missing_resource(dest_repo_id)

        dest_repo_importer = model.Importer.objects.get_or_404(repo_id=dest_repo_id)
        source_repo_importer = model.Importer.objects.get_or_404(repo_id=source_repo_id)

        # The docs are incorrect on the list_importer_types call; it actually
        # returns a dict with the types under key "types" for some reason.
        supported_type_ids = set(plugin_api.list_importer_types(
            dest_repo_importer.importer_type_id)['types'])

        # Get the unit types from the source repo
        source_repo_unit_types = set(source_repo.content_unit_counts.keys())

        # Now we can make sure the destination repository's importer is capable
        # of importing either the selected units or all of the units
        if not source_repo_unit_types.issubset(supported_type_ids):
            raise exceptions.PulpCodedException(error_code=error_codes.PLP0044)
        transfer_units = None
        # if all source types have been converted to mongo - search via new style
        if source_repo_unit_types.issubset(set(plugin_api.list_unit_models())):
            transfer_units = cls._units_from_criteria(source_repo, criteria)
        else:
            # else, search via old style
            associate_us = load_associated_units(source_repo_id, criteria)
            # If units were supposed to be filtered but none matched, we're done
            if len(associate_us) == 0:
                # Return an empty list to indicate nothing was copied
                return {'units_successful': []}
            # Convert all of the units into the plugin standard representation if
            # a filter was specified
            transfer_units = None
            if associate_us is not None:
                transfer_units = create_transfer_units(associate_us)

        # Convert the two repos into the plugin API model
        transfer_dest_repo = dest_repo.to_transfer_repo()
        transfer_source_repo = source_repo.to_transfer_repo()

        # Invoke the importer
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            dest_repo_importer.importer_type_id)

        call_config = PluginCallConfiguration(plugin_config, dest_repo_importer.config,
                                              import_config_override)
        conduit = ImportUnitConduit(
            source_repo_id, dest_repo_id, source_repo_importer.importer_type_id,
            dest_repo_importer.importer_type_id)

        try:
            copied_units = importer_instance.import_units(
                transfer_source_repo, transfer_dest_repo, conduit, call_config,
                units=transfer_units)
            if isinstance(copied_units, tuple):
                suc_units_ids = [u.to_id_dict() for u in copied_units[0] if u is not None]
                unsuc_units_ids = [u.to_id_dict() for u in copied_units[1]]
                repo_controller.rebuild_content_unit_counts(dest_repo)
                return {'units_successful': suc_units_ids,
                        'units_failed_signature_filter': unsuc_units_ids}
            unit_ids = [u.to_id_dict() for u in copied_units if u is not None]
            repo_controller.rebuild_content_unit_counts(dest_repo)
            return {'units_successful': unit_ids}
        except Exception:
            msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]')
            msg_dict = {'i': dest_repo_importer.importer_type_id, 'r': dest_repo_id}
            logger.exception(msg % msg_dict)
            raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
Example #20
    def import_uploaded_unit(repo_id, unit_type_id, unit_key, unit_metadata, upload_id,
                             override_config=None):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        :param repo_id:       identifies the repository into which the unit is uploaded
        :type  repo_id:       str
        :param unit_type_id:  type of unit being uploaded
        :type  unit_type_id:  str
        :param unit_key:      unique identifier for the unit (user-specified)
        :type  unit_key:      dict
        :param unit_metadata: any user-specified information about the unit
        :type  unit_metadata: dict
        :param upload_id:     upload being imported
        :type  upload_id:     str
        :return:              A SyncReport indicating the success or failure of the upload
        :rtype:               pulp.plugins.model.SyncReport
        """
        # If it doesn't raise an exception, it's good to go
        ContentUploadManager.is_valid_upload(repo_id, unit_type_id)
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
        repo_importer = model.Importer.objects.get_or_404(repo_id=repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'])

        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'],
                                              override_config)
        transfer_repo = repo_obj.to_transfer_repo()

        file_path = ContentUploadManager._upload_file_path(upload_id)

        # Invoke the importer
        try:
            result = importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key,
                                                   unit_metadata, file_path, conduit, call_config)
            repo_controller.rebuild_content_unit_counts(repo_obj)
            return result

        except PulpException:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise
        except Exception, e:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise PulpExecutionException(e), None, sys.exc_info()[2]