Example #1
def remove_from_importer(repo_id, transfer_units):

    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(repo_importer['importer_type_id'],
                                                                  repo_id, mkdir=True)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units, call_config)
    except Exception:
        msg = _('Exception from importer [%(i)s] while removing units from repo [%(r)s]')
        msg = msg % {'i': repo_importer['id'], 'r': repo_id}
        logger.exception(msg)
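All of these snippets share one pattern: fetch the repo and its importer/distributor record, wrap the repo with to_transfer_repo(), and layer the plugin's static configuration under the repo-scoped configuration (plus an optional per-call override) through PluginCallConfiguration. The stand-in below is not Pulp's class; it is a minimal sketch of the lookup order the call sites above appear to rely on (override first, then repo config, then plugin config).

# Illustrative stand-in only -- not pulp.plugins.config.PluginCallConfiguration.
class LayeredCallConfig(object):
    def __init__(self, plugin_config, repo_plugin_config, override_config=None):
        self.plugin_config = plugin_config or {}
        self.repo_plugin_config = repo_plugin_config or {}
        self.override_config = override_config or {}

    def get(self, key, default=None):
        # Per-call override wins, then the repo-scoped config, then the
        # plugin's static config.
        for layer in (self.override_config, self.repo_plugin_config,
                      self.plugin_config):
            if key in layer:
                return layer[key]
        return default


config = LayeredCallConfig({'validate': True},
                           {'feed': 'http://example.com/repo/'},
                           {'validate': False})
assert config.get('validate') is False
assert config.get('feed') == 'http://example.com/repo/'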
Example #2
    def publish(repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None

        :return: report of the details of the publish
        :rtype: pulp.server.db.model.repository.RepoPublishResult
        """
        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(repository=repo_id,
                                  distributor=distributor_id)

        distributor_instance, distributor_config = RepoPublishManager.\
            _get_distributor_instance_and_config(repo_id, distributor_id)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        call_config = PluginCallConfiguration(distributor_config,
                                              repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.get_working_directory()

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = RepoPublishManager._do_publish(repo, distributor_id,
                                                distributor_instance,
                                                transfer_repo, conduit,
                                                call_config)
        fire_manager.fire_repo_publish_finished(result)

        return result
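For orientation, a hypothetical caller of the method above might look like the sketch below; the repo ID, distributor ID, and override keys are placeholders rather than values taken from Pulp.

# Hypothetical call site -- IDs and override keys are placeholders.
# publish() runs synchronously and returns a RepoPublishResult describing the run.
override = {'http': False, 'https': True}
result = RepoPublishManager.publish('example-repo', 'yum_distributor',
                                    publish_config_override=override)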
Example #3
def remove_from_importer(repo_id, transfer_units):
    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(
        repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config,
                                          repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units,
                                       call_config)
    except Exception:
        msg = _(
            'Exception from importer [%(i)s] while removing units from repo [%(r)s]'
        )
        msg = msg % {'i': repo_importer['id'], 'r': repo_id}
        logger.exception(msg)
Example #4
    def sync(self, repo_id, sync_config_override=None):
        """
        Performs a synchronize operation on the given repository.

        The given repo must have an importer configured. The identity of the
        importer is not a parameter to this call; if multiple importers are
        eventually supported this will have to change to indicate which
        importer to use.

        This method is intentionally limited to synchronizing a single repo.
        Performing multiple repository syncs concurrently will require a more
        global view of the server and must be handled outside the scope of this
        class.

        @param repo_id: identifies the repo to sync
        @type  repo_id: str

        @param sync_config_override: optional config containing values to use
                                     for this sync only
        @type  sync_config_override: dict

        @raise MissingResource: if repo_id does not refer to a valid repo
        @raise OperationFailed: if the given repo does not have an importer set
        """

        repo_coll = Repo.get_collection()

        # Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        importer_instance, importer_config = self._get_importer_instance_and_config(repo_id)

        if importer_instance is None:
            raise MissingResource(repo_id)

        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(importer_instance.cancel_sync_repo)

        importer_manager = manager_factory.repo_importer_manager()
        repo_importer = importer_manager.get_importer(repo_id)

        # Assemble the data needed for the sync
        conduit = RepoSyncConduit(repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_IMPORTER, repo_importer['id'])

        call_config = PluginCallConfiguration(importer_config, repo_importer['config'], sync_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(repo_importer['importer_type_id'], repo_id, mkdir=True)

        # Fire events around the call
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_sync_started(repo_id)
        sync_result = self._do_sync(repo, importer_instance, transfer_repo, conduit, call_config)
        fire_manager.fire_repo_sync_finished(sync_result)

        dispatch_context.clear_cancel_control_hook()

        if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
            raise PulpExecutionException(_('Importer indicated a failed response'))
Example #5
def remove_from_importer(repo_id, removed_units):

    # Retrieve the repo from the database and convert to the transfer repo
    repo_query_manager = manager_factory.repo_query_manager()
    repo = repo_query_manager.get_repository(repo_id)

    importer_manager = manager_factory.repo_importer_manager()
    repo_importer = importer_manager.get_importer(repo_id)

    transfer_repo = common_utils.to_transfer_repo(repo)
    transfer_repo.working_dir = common_utils.importer_working_dir(repo_importer['importer_type_id'], repo_id, mkdir=True)

    # Convert the units into transfer units
    unit_type_ids = calculate_associated_type_ids(repo_id, removed_units)
    transfer_units = create_transfer_units(removed_units, unit_type_ids)

    # Retrieve the plugin instance to invoke
    importer_instance, plugin_config = plugin_api.get_importer_by_id(repo_importer['importer_type_id'])
    call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

    # Invoke the importer's remove method
    try:
        importer_instance.remove_units(transfer_repo, transfer_units, call_config)
    except Exception:
        _LOG.exception('Exception from importer [%s] while removing units from repo [%s]' % (repo_importer['id'], repo_id))
Example #6
    def create_bind_payload(self, repo_id, distributor_id):
        """
        Requests the distributor plugin to generate the consumer bind payload.

        @param repo_id: identifies the repo being bound
        @type  repo_id: str

        @param distributor_id: identifies the distributor
        @type  distributor_id: str

        @return: payload object to pass to the consumer
        @rtype:  dict

        @raise MissingResource: if the repo or distributor do not exist
        @raise PulpExecutionException: if the distributor raises an error
        """

        # Input Validation
        repo_distributor = self.get_distributor(repo_id, distributor_id)
        repo = Repo.get_collection().find_one({'id' : repo_id})

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, repo_distributor['config'])
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(distributor_type_id, repo_id)

        try:
            payload = distributor_instance.create_consumer_payload(transfer_repo, call_config)
            return payload
        except Exception:
            _LOG.exception('Exception raised from distributor [%s] generating consumer payload' % distributor_id)
            raise PulpExecutionException(), None, sys.exc_info()[2]
Example #7
    def set_importer(repo_id, importer_type_id, repo_plugin_config):
        """
        Configures an importer to be used for the given repository.

        Keep in mind this method is written assuming a single importer per repo.
        The domain model technically supports multiple importers, but this
        call is what enforces the single importer behavior.

        :param repo_id:                      identifies the repo
        :type  repo_id:                      str
        :param importer_type_id:             identifies the type of importer being added;
                                             must correspond to an importer loaded at server startup
        :type  importer_type_id:             str
        :param repo_plugin_config:           configuration values for the importer; may be None
        :type  repo_plugin_config:           dict
        :raise MissingResource:              if repo_id does not represent a valid repo
        :raise InvalidImporterConfiguration: if the importer cannot be initialized for the given
                                             repo
        """
        RepoImporterManager.validate_importer_config(repo_id, importer_type_id, repo_plugin_config)
        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        repo = repo_coll.find_one({'id': repo_id})
        importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items() if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(importer_type_id, repo_id)

        # Remove old importer if one exists
        try:
            RepoImporterManager.remove_importer(repo_id)
        except MissingResource:
            pass  # it didn't exist, so no harm done

        # Let the importer plugin initialize the repository
        try:
            importer_instance.importer_added(transfer_repo, call_config)
        except Exception:
            _logger.exception(
                'Error initializing importer [%s] for repo [%s]' % (importer_type_id, repo_id))
            raise PulpExecutionException(), None, sys.exc_info()[2]

        # Database Update
        importer_id = importer_type_id  # use the importer name as its repo ID

        importer = RepoImporter(repo_id, importer_id, importer_type_id, clean_config)
        importer_coll.save(importer, safe=True)

        return importer
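A hedged call-site sketch for set_importer(); the repo ID, importer type, and config values are placeholders. Keys explicitly set to None are stripped so the plugin falls back to its defaults, per the convention noted in the code above.

# Hypothetical call site -- identifiers and config keys are placeholders.
importer = RepoImporterManager.set_importer(
    'example-repo', 'yum_importer',
    {'feed': 'http://example.com/repo/', 'proxy_host': None})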
Example #8
    def set_importer(repo_id, importer_type_id, repo_plugin_config):
        """
        Configures an importer to be used for the given repository.

        Keep in mind this method is written assuming a single importer per repo.
        The domain model technically supports multiple importers, but this
        call is what enforces the single importer behavior.

        :param repo_id:                      identifies the repo
        :type  repo_id:                      str
        :param importer_type_id:             identifies the type of importer being added;
                                             must correspond to an importer loaded at server startup
        :type  importer_type_id:             str
        :param repo_plugin_config:           configuration values for the importer; may be None
        :type  repo_plugin_config:           dict
        :raise MissingResource:              if repo_id does not represent a valid repo
        :raise InvalidImporterConfiguration: if the importer cannot be initialized for the given
                                             repo
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        if not plugin_api.is_valid_importer(importer_type_id):
            raise InvalidValue(['importer_type_id'])

        importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items() if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(importer_type_id, repo_id)

        try:
            result = importer_instance.validate_config(transfer_repo, call_config)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result

        except Exception, e:
            logger.exception(
                'Exception received from importer [%s] while validating config' % importer_type_id)
            raise PulpDataException(e.args), None, sys.exc_info()[2]
Example #9
    def resolve_dependencies_by_units(repo_id, units, options):
        """
        Calculates dependencies for the given set of units in the given
        repository.

        :param repo_id:         identifies the repository
        :type  repo_id:         str
        :param units:           list of database representations of units to resolve dependencies
                                for
        :type  units:           list
        :param options:         dict of options to pass the importer to drive the resolution
        :type  options:         dict or None
        :return:                report from the plugin
        :rtype:                 object
        :raise MissingResource: if the repo does not exist or does not have an importer
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        # The following will raise MissingResource as appropriate
        repo = repo_query_manager.get_repository(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Package for the importer call
        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], options)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            repo_importer['importer_type_id'], repo_id, mkdir=True)

        conduit = DependencyResolutionConduit(repo_id, repo_importer['id'])

        # Convert all of the units into the plugin standard representation
        transfer_units = []

        # Preload all the type defs so we don't hammer the database unnecessarily
        type_defs = {}
        all_type_def_ids = set([u['unit_type_id'] for u in units])
        for def_id in all_type_def_ids:
            type_def = types_db.type_definition(def_id)
            type_defs[def_id] = type_def

        for unit in units:
            type_id = unit['unit_type_id']
            u = conduit_common_utils.to_plugin_associated_unit(unit, type_defs[type_id])
            transfer_units.append(u)

        # Invoke the importer
        try:
            dep_report = importer_instance.resolve_dependencies(transfer_repo, transfer_units,
                                                                conduit, call_config)
        except Exception, e:
            raise PulpExecutionException(), None, sys.exc_info()[2]
Example #10
    def __init__(self, repo_group, publish_conduit, config, distributor_type):
        """
        :param repo_group: Pulp managed Yum repository
        :type  repo_group: pulp.plugins.model.RepositoryGroup
        :param publish_conduit: Conduit providing access to relative Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoGroupPublishConduit
        :param config: Pulp configuration for the distributor
        :type  config: pulp.plugins.config.PluginCallConfiguration
        :param distributor_type: The type of the distributor that is being published
        :type distributor_type: str
        """
        super(ExportRepoGroupPublisher, self).__init__(constants.PUBLISH_STEP_EXPORT_REPO_GROUP,
                                                       repo_group, publish_conduit, config,
                                                       working_dir=repo_group.working_dir,
                                                       distributor_type=distributor_type)

        working_dir = self.get_working_dir()
        scratch_dir = os.path.join(working_dir, 'scratch')
        realized_dir = os.path.join(working_dir, 'realized')

        flat_config = config.flatten()
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        if export_dir:
            repo_config = config
        else:
            repo_config = PluginCallConfiguration(flat_config, {constants.EXPORT_DIRECTORY_KEYWORD:
                                                                realized_dir})
        query_manager = RepoQueryManager()

        repos = query_manager.find_by_id_list(repo_group.repo_ids)
        empty_repos = True
        for repo in repos:
            empty_repos = False
            repo = common_utils.to_transfer_repo(repo)
            # Make sure we only publish rpm repos
            if repo.notes['_repo-type'] != 'rpm-repo':
                continue

            repo_config_copy = copy.deepcopy(repo_config)
            repo.working_dir = os.path.join(scratch_dir, repo.id)
            repo_conduit = RepoPublishConduit(repo.id, distributor_type)
            publisher = ExportRepoPublisher(repo, repo_conduit, repo_config_copy,
                                            distributor_type)
            publisher.description = _("Exporting Repo: %s") % repo.id
            self.add_child(publisher)
        if empty_repos:
            os.makedirs(realized_dir)
            self.add_child(GenerateListingFileStep(realized_dir, realized_dir))

        # If we aren't exporting to a directory, add the ISO create & publish steps
        if not export_dir:
            # Create the steps to generate the ISOs and publish them to their final location
            output_dir = os.path.join(working_dir, 'output')
            self.add_child(CreateIsoStep(realized_dir, output_dir))
            export_dirs = configuration.get_export_repo_group_publish_dirs(repo_group, config)
            publish_location = [('/', location) for location in export_dirs]

            master_dir = configuration.get_master_publish_dir(repo_group, distributor_type)
            self.add_child(AtomicDirectoryPublishStep(output_dir, publish_location, master_dir))
Example #11
    def resolve_dependencies_by_units(repo_id, units, options):
        """
        Calculates dependencies for the given set of units in the given
        repository.

        :param repo_id:         identifies the repository
        :type  repo_id:         str
        :param units:           list of database representations of units to resolve dependencies
                                for
        :type  units:           list
        :param options:         dict of options to pass the importer to drive the resolution
        :type  options:         dict or None
        :return:                report from the plugin
        :rtype:                 object
        :raise MissingResource: if the repo does not exist or does not have an importer
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        # The following will raise MissingResource as appropriate
        repo = repo_query_manager.get_repository(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Package for the importer call
        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], options)
        transfer_repo = common_utils.to_transfer_repo(repo)

        conduit = DependencyResolutionConduit(repo_id, repo_importer['id'])

        # Convert all of the units into the plugin standard representation
        transfer_units = []

        # Preload all the type defs so we don't hammer the database unnecessarily
        type_defs = {}
        all_type_def_ids = set([u['unit_type_id'] for u in units])
        for def_id in all_type_def_ids:
            type_def = types_db.type_definition(def_id)
            type_defs[def_id] = type_def

        for unit in units:
            type_id = unit['unit_type_id']
            u = conduit_common_utils.to_plugin_associated_unit(unit, type_defs[type_id])
            transfer_units.append(u)

        # Invoke the importer
        try:
            dep_report = importer_instance.resolve_dependencies(transfer_repo, transfer_units,
                                                                conduit, call_config)
        except Exception:
            raise PulpExecutionException(), None, sys.exc_info()[2]

        return dep_report
Example #12
    def publish(self, repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None

        :return: report of the details of the publish
        :rtype: pulp.server.plugins.model.PublishReport
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id' : repo_id, 'id' : distributor_id})
        if repo_distributor is None:
            raise MissingResource(repository=repo_id, distributor=distributor_id)

        distributor_instance, distributor_config = self._get_distributor_instance_and_config(repo_id, distributor_id)

        if distributor_instance is None:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(distributor_instance.cancel_publish_repo)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        call_config = PluginCallConfiguration(distributor_config, repo_distributor['config'], publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = self._do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)

        dispatch_context.clear_cancel_control_hook()

        return result
Example #13
    def import_uploaded_unit(self, repo_id, unit_type_id, unit_key, unit_metadata, upload_id):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        @param repo_id: identifies the repository into which the unit is uploaded
        @type  repo_id: str

        @param unit_type_id: type of unit being uploaded
        @type  unit_type_id: str

        @param unit_key: unique identifier for the unit (user-specified)
        @type  unit_key: dict

        @param unit_metadata: any user-specified information about the unit
        @type  unit_metadata: dict

        @param upload_id: upload being imported
        @type  upload_id: str
        """

        # If it doesn't raise an exception, it's good to go
        self.is_valid_upload(repo_id, unit_type_id)

        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        repo = repo_query_manager.find_by_id(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER, pulp_principal.get_principal()['login'])

        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'], None)
        transfer_repo = repo_common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = repo_common_utils.importer_working_dir(repo_importer['importer_type_id'], repo_id, mkdir=True)

        file_path = self._upload_file_path(upload_id)

        # Invoke the importer
        try:
            # def upload_unit(self, type_id, unit_key, metadata, file_path, conduit, config):
            report = importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key, unit_metadata, file_path, conduit, call_config)
        except Exception, e:
            _LOG.exception('Error from the importer while importing uploaded unit to repository [%s]' % repo_id)
            raise PulpExecutionException(e), None, sys.exc_info()[2]
Example #14
    def publish(self, repo_id, distributor_id, distributor_instance=None, distributor_config=None, publish_config_override=None, base_progress_report=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param distributor_instance: the distributor instance for this repo and this publish
        @type distributor_instance: pulp.plugins.distributor.Distributor

        @param distributor_config: base configuration for the distributor
        @type distributor_config: dict

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None

        @param base_progress_report: basis for the progress report to be built on;
               this method will modify the given instance
        @type  base_progress_report: dict
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id' : repo_id, 'id' : distributor_id})
        if repo_distributor is None:
            raise MissingResource(repository=repo_id, distributor=distributor_id)

        if distributor_instance is None:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id, base_progress_report=base_progress_report)

        call_config = PluginCallConfiguration(distributor_config, repo_distributor['config'], publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = self._do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)
Example #15
    def test_to_transfer_repo_unit_timestamps_not_specified(self):
        data = {
            'id': 'foo',
            'display_name': 'bar',
            'description': 'baz',
            'notes': 'qux',
            'content_unit_counts': {'units': 1}
        }

        repo = to_transfer_repo(data)
        self.assertEquals(None, repo.last_unit_added)
        self.assertEquals(None, repo.last_unit_removed)
Example #16
    def validate_importer_config(repo_id, importer_type_id, importer_config):
        """
        Validate an importer configuration. This also checks that the repository and importer
        type exist, since both are required to validate the configuration.

        :param repo_id:             identifies the repo
        :type  repo_id:             str
        :param importer_type_id:    identifies the type of importer being added;
                                    must correspond to an importer loaded at server startup
        :type  importer_type_id:    str
        :param importer_config:     configuration values for the importer; may be None
        :type  importer_config:     dict
        """
        repo_coll = Repo.get_collection()
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        if not plugin_api.is_valid_importer(importer_type_id):
            raise PulpCodedValidationException(
                error_code=error_codes.PLP1008,
                importer_type_id=importer_type_id)

        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if importer_config is not None:
            clean_config = dict([(k, v) for k, v in importer_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            importer_type_id, repo_id)

        result = importer_instance.validate_config(transfer_repo, call_config)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result

        if not valid_config:
            raise PulpCodedValidationException(validation_errors=message)
Example #17
def _re_publish_repository(repo, distributor):
    """
    Re-publish the repository using the new yum distributor.

    NOTE: this may be a bit time-consuming.
    """

    repo = common_utils.to_transfer_repo(repo)
    repo.working_dir = common_utils.distributor_working_dir(distributor['distributor_type_id'], repo.id)
    conduit = RepoPublishConduit(repo.id, distributor['id'])
    config = PluginCallConfiguration(NEW_DISTRIBUTOR_CONF, distributor['config'])

    publisher = Publisher(repo, conduit, config)
    publisher.publish()
Example #18
    def remove_distributor(repo_id, distributor_id):
        """
        Removes a distributor from a repository.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_id: identifies the distributor to delete
        @type  distributor_id: str

        @raise MissingResource: if repo_id doesn't correspond to a valid repo
        @raise MissingResource: if there is no distributor with the given ID
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(distributor=distributor_id)

        # remove schedules
        RepoPublishScheduleManager().delete_by_distributor_id(
            repo_id, repo_distributor['id'])

        # Call the distributor's cleanup method
        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        call_config = PluginCallConfiguration(plugin_config,
                                              repo_distributor['config'])

        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_id)

        distributor_instance.distributor_removed(transfer_repo, call_config)

        # Update the database to reflect the removal
        distributor_coll.remove({'_id': repo_distributor['_id']}, safe=True)
Example #19
def _re_publish_repository(repo, distributor):
    """
    Re-publish the repository using the new yum distributor.

    NOTE: this may be a bit time-consuming.
    """

    repo = common_utils.to_transfer_repo(repo)
    repo.working_dir = distributor_working_dir(
        distributor['distributor_type_id'], repo.id)
    conduit = RepoPublishConduit(repo.id, distributor['id'])
    config = PluginCallConfiguration(NEW_DISTRIBUTOR_CONF,
                                     distributor['config'])

    publisher = Publisher(repo, conduit, config, YUM_DISTRIBUTOR_ID)
    publisher.publish()
Example #20
    def validate_importer_config(repo_id, importer_type_id, importer_config):
        """
        Validate an importer configuration. This also checks that the repository and importer
        type exist, since both are required to validate the configuration.

        :param repo_id:             identifies the repo
        :type  repo_id:             str
        :param importer_type_id:    identifies the type of importer being added;
                                    must correspond to an importer loaded at server startup
        :type  importer_type_id:    str
        :param importer_config:     configuration values for the importer; may be None
        :type  importer_config:     dict
        """
        repo_coll = Repo.get_collection()
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        if not plugin_api.is_valid_importer(importer_type_id):
            raise PulpCodedValidationException(error_code=error_codes.PLP1008,
                                               importer_type_id=importer_type_id)

        importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if importer_config is not None:
            clean_config = dict([(k, v) for k, v in importer_config.items() if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(importer_type_id, repo_id)

        result = importer_instance.validate_config(transfer_repo, call_config)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result

        if not valid_config:
            raise PulpCodedValidationException(validation_errors=message)
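A hedged sketch of how a caller might use this validation; the identifiers are placeholders and handle_invalid_config stands in for whatever error handling the caller needs. A PulpCodedValidationException propagates when the plugin rejects the configuration.

# Hypothetical call -- identifiers are placeholders.
try:
    RepoImporterManager.validate_importer_config(
        'example-repo', 'yum_importer', {'feed': 'http://example.com/repo/'})
except PulpCodedValidationException as e:
    handle_invalid_config(e)  # stand-in; the exception carries the plugin's validation messages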
Example #21
    def create_bind_payload(self, repo_id, distributor_id, binding_config):
        """
        Requests the distributor plugin to generate the consumer bind payload.

        @param repo_id: identifies the repo being bound
        @type  repo_id: str

        @param distributor_id: identifies the distributor
        @type  distributor_id: str

        @param binding_config: configuration applicable only to the binding whose
               payload is being created; may be None
        @type  binding_config: object or None

        @return: payload object to pass to the consumer
        @rtype:  dict

        @raise MissingResource: if the repo or distributor do not exist
        @raise PulpExecutionException: if the distributor raises an error
        """

        # Input Validation
        repo_distributor = self.get_distributor(repo_id, distributor_id)
        repo = Repo.get_collection().find_one({'id': repo_id})

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config,
                                              repo_distributor['config'])
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_id)

        try:
            payload = distributor_instance.create_consumer_payload(
                transfer_repo, call_config, binding_config)
            return payload
        except Exception:
            msg = _(
                'Exception raised from distributor [%(d)s] generating consumer payload'
            )
            msg = msg % {'d': distributor_id}
            _logger.exception(msg)
            raise PulpExecutionException(), None, sys.exc_info()[2]
Example #22
    def remove_distributor(repo_id, distributor_id):
        """
        Removes a distributor from a repository.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_id: identifies the distributor to delete
        @type  distributor_id: str

        @raise MissingResource: if repo_id doesn't correspond to a valid repo
        @raise MissingResource: if there is no distributor with the given ID
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id': repo_id, 'id': distributor_id})
        if repo_distributor is None:
            raise MissingResource(distributor=distributor_id)

        # remove schedules
        RepoPublishScheduleManager().delete_by_distributor_id(repo_id, repo_distributor['id'])

        # Call the distributor's cleanup method
        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        call_config = PluginCallConfiguration(plugin_config, repo_distributor['config'])

        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(distributor_type_id,
                                                                         repo_id)

        distributor_instance.distributor_removed(transfer_repo, call_config)

        # Update the database to reflect the removal
        distributor_coll.remove({'_id': repo_distributor['_id']}, safe=True)
Example #23
    def remove_importer(repo_id):
        """
        Removes an importer from a repository.

        :param repo_id:         identifies the repo
        :type  repo_id:         str
        :raise MissingResource: if the given repo does not exist
        :raise MissingResource: if the given repo does not have an importer
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_importer = importer_coll.find_one({'repo_id': repo_id})

        if repo_importer is None:
            raise MissingResource(repo_id)

        # remove schedules
        RepoSyncScheduleManager().delete_by_importer_id(
            repo_id, repo_importer['id'])

        # Call the importer's cleanup method
        importer_type_id = repo_importer['importer_type_id']
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        call_config = PluginCallConfiguration(plugin_config,
                                              repo_importer['config'])

        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            importer_type_id, repo_id)

        importer_instance.importer_removed(transfer_repo, call_config)

        # Update the database to reflect the removal
        importer_coll.remove({'repo_id': repo_id}, safe=True)
Example #24
    def create_bind_payload(self, repo_id, distributor_id, binding_config):
        """
        Requests the distributor plugin to generate the consumer bind payload.

        @param repo_id: identifies the repo being bound
        @type  repo_id: str

        @param distributor_id: identifies the distributor
        @type  distributor_id: str

        @param binding_config: configuration applicable only to the binding whose
               payload is being created; may be None
        @type  binding_config: object or None

        @return: payload object to pass to the consumer
        @rtype:  dict

        @raise MissingResource: if the repo or distributor do not exist
        @raise PulpExecutionException: if the distributor raises an error
        """

        # Input Validation
        repo_distributor = self.get_distributor(repo_id, distributor_id)
        repo = Repo.get_collection().find_one({'id': repo_id})

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, repo_distributor['config'])
        transfer_repo = common_utils.to_transfer_repo(repo)

        try:
            payload = distributor_instance.create_consumer_payload(transfer_repo, call_config,
                                                                   binding_config)
            return payload
        except Exception:
            msg = _('Exception raised from distributor [%(d)s] generating consumer payload')
            msg = msg % {'d': distributor_id}
            _logger.exception(msg)
            raise PulpExecutionException(), None, sys.exc_info()[2]
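As a hedged illustration, a caller holding the distributor manager instance (assumed here; the snippet only shows the method itself) might request a payload like this. The identifiers and binding options are placeholders.

# Hypothetical call -- 'distributor_manager' and the argument values are placeholders.
payload = distributor_manager.create_bind_payload(
    'example-repo', 'yum_distributor', {'notify_agent': False})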
Example #25
    def test_to_transfer_repo(self):

        dt = dateutils.now_utc_datetime_with_tzinfo()
        data = {
            'id': 'foo',
            'display_name': 'bar',
            'description': 'baz',
            'notes': 'qux',
            'content_unit_counts': {'units': 1},
            'last_unit_added': dt,
            'last_unit_removed': dt
        }

        repo = to_transfer_repo(data)
        self.assertEquals('foo', repo.id)
        self.assertEquals('bar', repo.display_name)
        self.assertEquals('baz', repo.description)
        self.assertEquals('qux', repo.notes)
        self.assertEquals({'units': 1}, repo.content_unit_counts)
        self.assertEquals(dt, repo.last_unit_added)
        self.assertEquals(dt, repo.last_unit_removed)
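Read together with Example #15, these tests suggest the minimal document to_transfer_repo() needs. The sketch below uses placeholder values; the unit timestamps may be omitted (they then default to None), and working_dir is assigned by callers afterwards, as the other examples do.

# Placeholder input document for to_transfer_repo().
repo_doc = {
    'id': 'example-repo',
    'display_name': 'Example Repo',
    'description': 'demo repository',
    'notes': {'_repo-type': 'rpm-repo'},
    'content_unit_counts': {'rpm': 42},
}
transfer_repo = to_transfer_repo(repo_doc)
transfer_repo.working_dir = '/tmp/example-repo'  # set by callers, not by the helper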
Example #26
    def remove_importer(repo_id):
        """
        Removes an importer from a repository.

        :param repo_id:         identifies the repo
        :type  repo_id:         str
        :raise MissingResource: if the given repo does not exist
        :raise MissingResource: if the given repo does not have an importer
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_importer = importer_coll.find_one({'repo_id': repo_id})

        if repo_importer is None:
            raise MissingResource(repo_id)

        # remove schedules
        RepoSyncScheduleManager().delete_by_importer_id(repo_id, repo_importer['id'])

        # Call the importer's cleanup method
        importer_type_id = repo_importer['importer_type_id']
        importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'])

        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(importer_type_id, repo_id)

        importer_instance.importer_removed(transfer_repo, call_config)

        # Update the database to reflect the removal
        importer_coll.remove({'repo_id': repo_id}, safe=True)
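A hedged usage sketch mirroring the way Example #7 calls this method; the repo ID is a placeholder.

# Mirrors the call in Example #7: tolerate a repo that has no importer yet.
try:
    RepoImporterManager.remove_importer('example-repo')
except MissingResource:
    pass  # repo missing or no importer attached; nothing to clean up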
Example #27
    def add_distributor(self,
                        repo_id,
                        distributor_type_id,
                        repo_plugin_config,
                        auto_publish,
                        distributor_id=None):
        """
        Adds an association from the given repository to a distributor. The
        association will be tracked through the distributor_id; each distributor
        on a given repository must have a unique ID. If this is not specified,
        one will be generated. If a distributor already exists on the repo for
        the given ID, the existing one will be removed and replaced with the
        newly configured one.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_type_id: identifies the distributor; must correspond
                                    to a distributor loaded at server startup
        @type  distributor_type_id: str

        @param repo_plugin_config: configuration the repo will use with this distributor; may be None
        @type  repo_plugin_config: dict

        @param auto_publish: if true, this distributor will be invoked at
                             the end of every sync
        @type  auto_publish: bool

        @param distributor_id: unique ID to refer to this distributor for this repo
        @type  distributor_id: str

        @return: ID assigned to the distributor (only valid in conjunction with the repo)

        @raise MissingResource: if the given repo_id does not refer to a valid repo
        @raise InvalidValue: if the distributor ID is provided and unacceptable
        @raise InvalidDistributorConfiguration: if the distributor plugin does not
               accept the given configuration
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        if not plugin_api.is_valid_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for this distributor on this repo; will be
        # unique for all distributors on this repository but not globally
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_id)

        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_distributor_type(
            distributor_type_id)

        transfer_related_repos = []
        for r in related_repos:
            all_configs = [d['config'] for d in r['distributors']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = distributor_instance.validate_config(
                transfer_repo, call_config, transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception(
                'Exception received from distributor [%s] while validating config'
                % distributor_type_id)
            raise PulpDataException(e.args), None, sys.exc_info()[2]
Example #28
    def sync(repo_id, sync_config_override=None):
        """
        Performs a synchronize operation on the given repository.

        The given repo must have an importer configured. The identity of the
        importer is not a parameter to this call; if multiple importers are
        eventually supported this will have to change to indicate which
        importer to use.

        This method is intentionally limited to synchronizing a single repo.
        Performing multiple repository syncs concurrently will require a more
        global view of the server and must be handled outside the scope of this
        class.

        @param repo_id: identifies the repo to sync
        @type  repo_id: str

        @param sync_config_override: optional config containing values to use
                                     for this sync only
        @type  sync_config_override: dict

        @return: the sync result plus any spawned auto-publish tasks
        @rtype:  TaskResult

        @raise MissingResource: if repo_id does not refer to a valid repo
        @raise OperationFailed: if the given repo does not have an importer set
        """

        repo_coll = Repo.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        importer_instance, importer_config = RepoSyncManager._get_importer_instance_and_config(
            repo_id)

        if importer_instance is None:
            raise MissingResource(repo_id)

        importer_manager = manager_factory.repo_importer_manager()
        repo_importer = importer_manager.get_importer(repo_id)

        # Assemble the data needed for the sync
        conduit = RepoSyncConduit(repo_id, repo_importer['id'])

        call_config = PluginCallConfiguration(importer_config,
                                              repo_importer['config'],
                                              sync_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.get_working_directory()

        # Fire events around the call
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_sync_started(repo_id)
        sync_result = RepoSyncManager._do_sync(repo, importer_instance,
                                               transfer_repo, conduit,
                                               call_config)
        fire_manager.fire_repo_sync_finished(sync_result)

        if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
            raise PulpExecutionException(
                _('Importer indicated a failed response'))

        repo_publish_manager = manager_factory.repo_publish_manager()
        auto_distributors = repo_publish_manager.auto_distributors(repo_id)

        spawned_tasks = []
        for distributor in auto_distributors:
            distributor_id = distributor['id']
            spawned_tasks.append(
                repo_publish_manager.queue_publish(repo_id,
                                                   distributor_id).task_id)

        return TaskResult(sync_result, spawned_tasks=spawned_tasks)
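
A minimal sketch of the configuration layering used above, assuming PluginCallConfiguration resolves keys with per-call overrides taking precedence over the repo's stored importer config, which in turn takes precedence over the plugin's default config. This is a simplified stand-in, not the real pulp.plugins.config class:

class LayeredCallConfig(object):
    def __init__(self, plugin_config, repo_plugin_config, override_config=None):
        # Most specific layer first; missing layers are simply skipped.
        self._layers = [c for c in (override_config, repo_plugin_config, plugin_config)
                        if c is not None]

    def get(self, key, default=None):
        for layer in self._layers:
            if key in layer:
                return layer[key]
        return default

# e.g. a sync_config_override of {'feed': 'http://example.com/alt/'} would shadow the
# importer's stored 'feed' for this one sync without touching the saved configuration.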
Example #29
0
    def associate_from_repo(source_repo_id, dest_repo_id, criteria=None,
                            import_config_override=None):
        """
        Creates associations in a repository based on the contents of a source
        repository. Units from the source repository can be filtered by
        specifying a criteria object.

        The destination repository must have an importer that can support
        the types of units being associated. This is done by analyzing the
        unit list and the importer metadata and takes place before the
        destination repository is called.

        Pulp does not actually perform the associations as part of this call.
        The unit list is determined and passed to the destination repository's
        importer. It is the job of the importer to make the associate calls
        back into Pulp where applicable.

        If criteria is None, the effect of this call is to copy the source
        repository's associations into the destination repository.

        :param source_repo_id:         identifies the source repository
        :type  source_repo_id:         str
        :param dest_repo_id:           identifies the destination repository
        :type  dest_repo_id:           str
        :param criteria:               optional; if specified, will filter the units retrieved from
                                       the source repository
        :type  criteria:               UnitAssociationCriteria
        :param import_config_override: optional config containing values to use for this import only
        :type  import_config_override: dict
        :return:                       dict with key 'units_successful' whose value is a list of
                                       unit ID dicts (see pulp.plugins.model.Unit.to_id_dict) for
                                       the units that were associated by this operation
        :rtype:                        dict
        :raise MissingResource:        if either of the specified repositories don't exist
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        source_repo = repo_query_manager.get_repository(source_repo_id)
        dest_repo = repo_query_manager.get_repository(dest_repo_id)

        # This will raise MissingResource if there isn't one, which is the
        # behavior we want this method to exhibit, so just let it bubble up.
        dest_repo_importer = importer_manager.get_importer(dest_repo_id)
        source_repo_importer = importer_manager.get_importer(source_repo_id)

        # The docs are incorrect on the list_importer_types call; it actually
        # returns a dict with the types under key "types" for some reason.
        supported_type_ids = plugin_api.list_importer_types(
            dest_repo_importer['importer_type_id'])['types']

        # If criteria is specified, retrieve the list of units now
        associate_us = None
        if criteria is not None:
            associate_us = load_associated_units(source_repo_id, criteria)

            # If units were supposed to be filtered but none matched, we're done
            if len(associate_us) == 0:
                # Return an empty result to indicate nothing was copied
                return {'units_successful': []}

        # Now we can make sure the destination repository's importer is capable
        # of importing either the selected units or all of the units
        associated_unit_type_ids = calculate_associated_type_ids(source_repo_id, associate_us)
        unsupported_types = [t for t in associated_unit_type_ids if t not in supported_type_ids]

        if len(unsupported_types) > 0:
            raise exceptions.InvalidValue(['types'])

        # Convert all of the units into the plugin standard representation if
        # a filter was specified
        transfer_units = None
        if associate_us is not None:
            transfer_units = create_transfer_units(associate_us, associated_unit_type_ids)

        # Convert the two repos into the plugin API model
        transfer_dest_repo = common_utils.to_transfer_repo(dest_repo)
        transfer_dest_repo.working_dir = common_utils.importer_working_dir(
            dest_repo_importer['importer_type_id'], dest_repo['id'], mkdir=True)

        transfer_source_repo = common_utils.to_transfer_repo(source_repo)
        transfer_source_repo.working_dir = common_utils.importer_working_dir(
            source_repo_importer['importer_type_id'], source_repo['id'], mkdir=True)

        # Invoke the importer
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            dest_repo_importer['importer_type_id'])

        call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'],
                                              import_config_override)
        login = manager_factory.principal_manager().get_principal()['login']
        conduit = ImportUnitConduit(
            source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id'],
            RepoContentUnit.OWNER_TYPE_USER, login)

        try:
            copied_units = importer_instance.import_units(
                transfer_source_repo, transfer_dest_repo, conduit, call_config,
                units=transfer_units)
            unit_ids = [u.to_id_dict() for u in copied_units]
            return {'units_successful': unit_ids}

        except Exception:
            msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]')
            msg = msg % {'i': dest_repo_importer['importer_type_id'], 'r': dest_repo_id}
            logger.exception(msg)
            raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
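
For context, a hypothetical importer-side counterpart to the import_units call above: Pulp hands the importer both transfer repos and, when criteria was given, a pre-filtered unit list; the importer associates each unit through the conduit and returns what it copied. The conduit method names here are assumptions, not a definitive plugin API reference.

class ExampleImporter(object):
    def import_units(self, source_repo, dest_repo, import_conduit, config, units=None):
        if units is None:
            # No criteria was supplied, so pull every unit from the source repository.
            units = import_conduit.get_source_units()
        copied = []
        for unit in units:
            import_conduit.associate_unit(unit)  # create the association in the destination
            copied.append(unit)
        return copied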
Example #30
0
    def sync(self, repo_id, sync_config_override=None):
        """
        Performs a synchronize operation on the given repository.

        The given repo must have an importer configured. The identity of the
        importer is not a parameter to this call; if multiple importers are
        eventually supported this will have to change to indicate which
        importer to use.

        This method is intentionally limited to synchronizing a single repo.
        Performing multiple repository syncs concurrently will require a more
        global view of the server and must be handled outside the scope of this
        class.

        @param repo_id: identifies the repo to sync
        @type  repo_id: str

        @param sync_config_override: optional config containing values to use
                                     for this sync only
        @type  sync_config_override: dict

        @return: The synchronization report.
        @rtype: L{pulp.server.plugins.model.SyncReport}

        @raise MissingResource: if repo_id does not refer to a valid repo
        @raise OperationFailed: if the given repo does not have an importer set
        """

        repo_coll = Repo.get_collection()

        # Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        importer_instance, importer_config = self._get_importer_instance_and_config(repo_id)

        if importer_instance is None:
            raise MissingResource(repo_id)

        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(importer_instance.cancel_sync_repo)

        importer_manager = manager_factory.repo_importer_manager()
        repo_importer = importer_manager.get_importer(repo_id)

        # Assemble the data needed for the sync
        conduit = RepoSyncConduit(repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_IMPORTER, repo_importer['id'])

        call_config = PluginCallConfiguration(importer_config, repo_importer['config'], sync_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(repo_importer['importer_type_id'], repo_id, mkdir=True)

        # Fire events around the call
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_sync_started(repo_id)
        sync_result = self._do_sync(repo, importer_instance, transfer_repo, conduit, call_config)
        fire_manager.fire_repo_sync_finished(sync_result)

        dispatch_context.clear_cancel_control_hook()

        if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
            raise PulpExecutionException(_('Importer indicated a failed response'))

        return sync_result
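
The cancel-control hook used in this older version can be pictured as a tiny registry on the dispatch context. The sketch below is illustrative only and does not reproduce the real dispatch factory:

class SketchDispatchContext(object):
    def __init__(self):
        self._cancel_hook = None

    def set_cancel_control_hook(self, hook):
        # Register the plugin's cancel method for the duration of the long-running call.
        self._cancel_hook = hook

    def clear_cancel_control_hook(self):
        self._cancel_hook = None

    def cancel(self):
        # Invoked by the task framework if the user cancels the sync.
        if self._cancel_hook is not None:
            self._cancel_hook()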
Example #31
0
    def associate_from_repo(self, source_repo_id, dest_repo_id, criteria=None):
        """
        Creates associations in a repository based on the contents of a source
        repository. Units from the source repository can be filtered by
        specifying a criteria object.

        The destination repository must have an importer that can support
        the types of units being associated. This is done by analyzing the
        unit list and the importer metadata and takes place before the
        destination repository is called.

        Pulp does not actually perform the associations as part of this call.
        The unit list is determined and passed to the destination repository's
        importer. It is the job of the importer to make the associate calls
        back into Pulp where applicable.

        If criteria is None, the effect of this call is to copy the source
        repository's associations into the destination repository.

        @param source_repo_id: identifies the source repository
        @type  source_repo_id: str

        @param dest_repo_id: identifies the destination repository
        @type  dest_repo_id: str

        @param criteria: optional; if specified, will filter the units retrieved
                         from the source repository
        @type  criteria: L{Criteria}

        @raise MissingResource: if either of the specified repositories don't exist
        """

        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()
        dependency_manager = manager_factory.dependency_manager()
        association_query_manager = manager_factory.repo_unit_association_query_manager()

        source_repo = repo_query_manager.get_repository(source_repo_id)
        dest_repo = repo_query_manager.get_repository(dest_repo_id)

        # This will raise MissingResource if there isn't one, which is the
        # behavior we want this method to exhibit, so just let it bubble up.
        dest_repo_importer = importer_manager.get_importer(dest_repo_id)
        source_repo_importer = importer_manager.get_importer(source_repo_id)

        # The docs are incorrect on the list_importer_types call; it actually
        # returns a dict with the types under key "types" for some reason.
        supported_type_ids = plugin_api.list_importer_types(dest_repo_importer['importer_type_id'])['types']

        # If criteria is specified, retrieve the list of units now
        associate_us = None
        if criteria is not None:
            criteria.association_fields = None
            criteria.unit_fields = None

            # Retrieve the units to be associated
            associate_us = association_query_manager.get_units(source_repo_id, criteria=criteria)

            # If units were supposed to be filtered but none matched, we're done
            if len(associate_us) == 0:
                return

        # Now we can make sure the destination repository's importer is capable
        # of importing either the selected units or all of the units
        if associate_us is not None:
            associated_unit_type_ids = set([u['unit_type_id'] for u in associate_us])
        else:
            association_query_manager = manager_factory.repo_unit_association_query_manager()

            # We may want to make a call here that only retrieves the unique
            # type IDs instead of all of the units, but for now it doesn't exist
            # and I'm not entirely sure this will be a huge problem.
            all_units = association_query_manager.get_units(source_repo_id)
            associated_unit_type_ids = set(u['unit_type_id'] for u in all_units)

        unsupported_types = [t for t in associated_unit_type_ids if t not in supported_type_ids]

        if len(unsupported_types) > 0:
            raise exceptions.InvalidValue(['types'])

        # Convert all of the units into the plugin standard representation if
        # a filter was specified
        transfer_units = None
        if associate_us is not None:
            type_defs = {}
            for def_id in associated_unit_type_ids:
                type_def = types_db.type_definition(def_id)
                type_defs[def_id] = type_def

            transfer_units = []
            for unit in associate_us:
                type_id = unit['unit_type_id']
                u = conduit_common_utils.to_plugin_unit(unit, type_defs[type_id])
                transfer_units.append(u)

        # Convert the two repos into the plugin API model
        transfer_dest_repo = common_utils.to_transfer_repo(dest_repo)
        transfer_dest_repo.working_dir = common_utils.importer_working_dir(dest_repo_importer['importer_type_id'], dest_repo['id'], mkdir=True)

        transfer_source_repo = common_utils.to_transfer_repo(source_repo)
        transfer_source_repo.working_dir = common_utils.importer_working_dir(source_repo_importer['importer_type_id'], source_repo['id'], mkdir=True)

        # Invoke the importer
        importer_instance, plugin_config = plugin_api.get_importer_by_id(dest_repo_importer['importer_type_id'])

        call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'])
        conduit = ImportUnitConduit(source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id'])

        try:
            importer_instance.import_units(transfer_source_repo, transfer_dest_repo, conduit, call_config, units=transfer_units)
        except Exception:
            _LOG.exception('Exception from importer [%s] while importing units into repository [%s]' % (dest_repo_importer['importer_type_id'], dest_repo_id))
            raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
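
The newer versions of associate_from_repo shown in Examples #29 and #33 delegate to helpers (load_associated_units, calculate_associated_type_ids, create_transfer_units) that are not reproduced in this collection. Based on the inline logic above, they plausibly look roughly like the following; treat the bodies as an approximation rather than the actual module code:

def load_associated_units(source_repo_id, criteria):
    criteria.association_fields = None
    criteria.unit_fields = None
    association_query_manager = manager_factory.repo_unit_association_query_manager()
    return association_query_manager.get_units(source_repo_id, criteria=criteria)


def calculate_associated_type_ids(source_repo_id, associated_units):
    if associated_units is not None:
        return set(u['unit_type_id'] for u in associated_units)
    association_query_manager = manager_factory.repo_unit_association_query_manager()
    all_units = association_query_manager.get_units(source_repo_id)
    return set(u['unit_type_id'] for u in all_units)


def create_transfer_units(associate_units, associated_unit_type_ids):
    type_defs = dict((def_id, types_db.type_definition(def_id))
                     for def_id in associated_unit_type_ids)
    return [conduit_common_utils.to_plugin_unit(u, type_defs[u['unit_type_id']])
            for u in associate_units]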
Example #32
0
    def publish(self, repo_id, distributor_id, publish_config_override=None):
        """
        Requests the given distributor publish the repository it is configured
        on.

        The publish operation is executed synchronously in the caller's thread
        and will block until it is completed. The caller must take the necessary
        steps to address the fact that a publish call may be time intensive.

        @param repo_id: identifies the repo being published
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor to publish
        @type  distributor_id: str

        @param publish_config_override: optional config values to use for this
                                        publish call only
        @type  publish_config_override: dict, None
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(repository=repo_id,
                                  distributor=distributor_id)

        distributor_instance, distributor_config = self._get_distributor_instance_and_config(
            repo_id, distributor_id)

        if distributor_instance is None:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        dispatch_context = dispatch_factory.context()
        dispatch_context.set_cancel_control_hook(
            distributor_instance.cancel_publish_repo)

        # Assemble the data needed for the publish
        conduit = RepoPublishConduit(repo_id, distributor_id)

        call_config = PluginCallConfiguration(distributor_config,
                                              repo_distributor['config'],
                                              publish_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            repo_distributor['distributor_type_id'], repo_id, mkdir=True)

        # Fire events describing the publish state
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_publish_started(repo_id, distributor_id)
        result = self._do_publish(repo, distributor_id, distributor_instance,
                                  transfer_repo, conduit, call_config)
        fire_manager.fire_repo_publish_finished(result)

        dispatch_context.clear_cancel_control_hook()
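
A hypothetical caller of this method, shown only to illustrate the blocking nature of publish and the MissingResource contract; the repo and distributor IDs are made up:

publish_manager = manager_factory.repo_publish_manager()
try:
    # Blocks in the calling thread until the distributor finishes publishing.
    publish_manager.publish('zoo-repo', 'yum_distributor',
                            publish_config_override={'relative_url': 'zoo'})
except MissingResource:
    # Either the repo or the named distributor does not exist.
    pass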
Example #33
0
    def associate_from_repo(source_repo_id, dest_repo_id, criteria=None,
                            import_config_override=None):
        """
        Creates associations in a repository based on the contents of a source
        repository. Units from the source repository can be filtered by
        specifying a criteria object.

        The destination repository must have an importer that can support
        the types of units being associated. This is done by analyzing the
        unit list and the importer metadata and takes place before the
        destination repository is called.

        Pulp does not actually perform the associations as part of this call.
        The unit list is determined and passed to the destination repository's
        importer. It is the job of the importer to make the associate calls
        back into Pulp where applicable.

        If criteria is None, the effect of this call is to copy the source
        repository's associations into the destination repository.

        :param source_repo_id:         identifies the source repository
        :type  source_repo_id:         str
        :param dest_repo_id:           identifies the destination repository
        :type  dest_repo_id:           str
        :param criteria:               optional; if specified, will filter the units retrieved from
                                       the source repository
        :type  criteria:               UnitAssociationCriteria
        :param import_config_override: optional config containing values to use for this import only
        :type  import_config_override: dict
        :return:                       dict with key 'units_successful' whose value is a list of
                                       unit ID dicts (see pulp.plugins.model.Unit.to_id_dict) for
                                       the units that were associated by this operation
        :rtype:                        dict
        :raise MissingResource:        if either of the specified repositories don't exist
        """
        # Validation
        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        source_repo = repo_query_manager.get_repository(source_repo_id)
        dest_repo = repo_query_manager.get_repository(dest_repo_id)

        # This will raise MissingResource if there isn't one, which is the
        # behavior we want this method to exhibit, so just let it bubble up.
        dest_repo_importer = importer_manager.get_importer(dest_repo_id)
        source_repo_importer = importer_manager.get_importer(source_repo_id)

        # The docs are incorrect on the list_importer_types call; it actually
        # returns a dict with the types under key "types" for some reason.
        supported_type_ids = plugin_api.list_importer_types(
            dest_repo_importer['importer_type_id'])['types']

        # If criteria is specified, retrieve the list of units now
        associate_us = None
        if criteria is not None:
            associate_us = load_associated_units(source_repo_id, criteria)

            # If units were supposed to be filtered but none matched, we're done
            if len(associate_us) == 0:
                # Return an empty list to indicate nothing was copied
                return {'units_successful': []}

        # Now we can make sure the destination repository's importer is capable
        # of importing either the selected units or all of the units
        associated_unit_type_ids = calculate_associated_type_ids(source_repo_id, associate_us)
        unsupported_types = [t for t in associated_unit_type_ids if t not in supported_type_ids]

        if len(unsupported_types) > 0:
            raise exceptions.InvalidValue(['types'])

        # Convert all of the units into the plugin standard representation if
        # a filter was specified
        transfer_units = None
        if associate_us is not None:
            transfer_units = create_transfer_units(associate_us, associated_unit_type_ids)

        # Convert the two repos into the plugin API model
        transfer_dest_repo = common_utils.to_transfer_repo(dest_repo)

        transfer_source_repo = common_utils.to_transfer_repo(source_repo)

        # Invoke the importer
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            dest_repo_importer['importer_type_id'])

        call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'],
                                              import_config_override)
        conduit = ImportUnitConduit(
            source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id'])

        try:
            copied_units = importer_instance.import_units(
                transfer_source_repo, transfer_dest_repo, conduit, call_config,
                units=transfer_units)
            unit_ids = [u.to_id_dict() for u in copied_units]
            return {'units_successful': unit_ids}

        except Exception:
            msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]')
            msg = msg % {'i': dest_repo_importer['importer_type_id'], 'r': dest_repo_id}
            logger.exception(msg)
            raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
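
Illustrative only: consuming the return value of this call. Each entry in 'units_successful' is the plugin unit's to_id_dict() form, which pairs the unit's type id with its unit key; the direct function invocation and the field names shown are assumptions for the sake of the sketch.

report = associate_from_repo('zoo-source', 'zoo-dest')
for unit_id in report['units_successful']:
    # e.g. {'type_id': 'rpm', 'unit_key': {'name': 'zebra', 'version': '1.0'}}
    print('%s %s' % (unit_id['type_id'], unit_id['unit_key']))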
Example #34
0
    def update_distributor_config(repo_id,
                                  distributor_id,
                                  distributor_config,
                                  auto_publish=None):
        """
        Attempts to update the saved configuration for the given distributor.
        The distributor will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        :param repo_id: identifies the repo
        :type  repo_id: str

        :param distributor_id: identifies the distributor on the repo
        :type  distributor_id: str

        :param distributor_config: new configuration values to use
        :type  distributor_config: dict

        :param auto_publish: If true, this distributor is used automatically during a sync operation
        :type auto_publish: bool

        :return: the updated distributor
        :rtype:  dict

        :raise MissingResource: if the given repo or distributor doesn't exist
        :raise PulpDataException: if the plugin rejects the given changes
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(distributor=distributor_id)

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_distributor['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [
            k for k in distributor_config if distributor_config[k] is None
        ]
        for key in unset_property_names:
            merged_config.pop(key, None)
            distributor_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(distributor_config)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        config_conduit = RepoConfigConduit(distributor_type_id)

        result = distributor_instance.validate_config(transfer_repo,
                                                      call_config,
                                                      config_conduit)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result

        if not valid_config:
            raise PulpDataException(message)

        # Confirm that the auto_publish value is sane before updating the value, if it exists
        if auto_publish is not None:
            if isinstance(auto_publish, bool):
                repo_distributor['auto_publish'] = auto_publish
            else:
                raise InvalidValue(['auto_publish'])

        # If we got this far, the new config is valid, so update the database
        repo_distributor['config'] = merged_config
        distributor_coll.save(repo_distributor, safe=True)

        return repo_distributor
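
The delta-merge convention above (a key set to None is removed so the plugin default applies) can be expressed as a standalone helper; this is an illustrative refactoring, not existing manager code:

def apply_config_delta(existing_config, delta):
    merged = dict(existing_config)
    for key, value in delta.items():
        if value is None:
            merged.pop(key, None)  # None means "unset and fall back to the default"
        else:
            merged[key] = value
    return merged

# e.g. apply_config_delta({'http': True, 'relative_url': 'zoo'}, {'relative_url': None})
# returns {'http': True}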
Example #35
0
    def update_importer_config(self, repo_id, importer_config):
        """
        Attempts to update the saved configuration for the given repo's importer.
        The importer will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param importer_config: new configuration values to use for this repo
        @type  importer_config: dict

        @raise MissingResource: if the given repo does not exist
        @raise MissingResource: if the given repo does not have an importer
        @raise InvalidConfiguration: if the plugin indicates the given
                configuration is invalid
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_importer = importer_coll.find_one({'repo_id': repo_id})
        if repo_importer is None:
            raise MissingResource(repo_id)

        importer_type_id = repo_importer['importer_type_id']
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_importer['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [
            k for k in importer_config if importer_config[k] is None
        ]
        for key in unset_property_names:
            merged_config.pop(key, None)
            importer_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(importer_config)

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            importer_type_id, repo_id)

        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_importer_type(importer_type_id)

        transfer_related_repos = []
        for r in related_repos:

            # Don't include the repo being updated in this list
            if r['id'] == repo_id:
                continue

            all_configs = [d['config'] for d in r['importers']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = importer_instance.validate_config(transfer_repo,
                                                       call_config,
                                                       transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception(
                'Exception received from importer [%s] while validating config for repo [%s]'
                % (importer_type_id, repo_id))
            raise PulpDataException(e.args), None, sys.exc_info()[2]
Example #36
0
    def import_uploaded_unit(repo_id, unit_type_id, unit_key, unit_metadata, upload_id,
                             override_config=None):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        :param repo_id:       identifies the repository into which the unit is uploaded
        :type  repo_id:       str
        :param unit_type_id:  type of unit being uploaded
        :type  unit_type_id:  str
        :param unit_key:      unique identifier for the unit (user-specified)
        :type  unit_key:      dict
        :param unit_metadata: any user-specified information about the unit
        :type  unit_metadata: dict
        :param upload_id:     upload being imported
        :type  upload_id:     str
        :param override_config: optional config containing values to use for this upload only
        :type  override_config: dict or None
        :return:              A SyncReport indicating the success or failure of the upload
        :rtype:               pulp.plugins.model.SyncReport
        """
        # If it doesn't raise an exception, it's good to go
        ContentUploadManager.is_valid_upload(repo_id, unit_type_id)

        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        repo = repo_query_manager.find_by_id(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'])

        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'],
                                              override_config)
        transfer_repo = repo_common_utils.to_transfer_repo(repo)

        file_path = ContentUploadManager._upload_file_path(upload_id)

        # Invoke the importer
        try:
            return importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key,
                                                 unit_metadata, file_path, conduit, call_config)
        except PulpException:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise
        except Exception, e:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise PulpExecutionException(e), None, sys.exc_info()[2]
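
The error-handling policy above (Pulp's own exceptions propagate unchanged, while arbitrary plugin errors are logged and wrapped in PulpExecutionException with the original traceback preserved) recurs in several of these managers. Purely as an illustration, the same policy as a reusable context manager, in Python 2 syntax to match the surrounding code:

import sys
from contextlib import contextmanager

@contextmanager
def wrap_plugin_errors(message):
    try:
        yield
    except PulpException:
        logger.exception(message)
        raise
    except Exception, e:
        logger.exception(message)
        raise PulpExecutionException(e), None, sys.exc_info()[2]

# Usage (inside a manager method):
#     with wrap_plugin_errors(msg):
#         return importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key,
#                                              unit_metadata, file_path, conduit, call_config)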
Example #37
0
    def sync(repo_id, sync_config_override=None):
        """
        Performs a synchronize operation on the given repository.

        The given repo must have an importer configured. The identity of the
        importer is not a parameter to this call; if multiple importers are
        eventually supported this will have to change to indicate which
        importer to use.

        This method is intentionally limited to synchronizing a single repo.
        Performing multiple repository syncs concurrently will require a more
        global view of the server and must be handled outside the scope of this
        class.

        @param repo_id: identifies the repo to sync
        @type  repo_id: str

        @param sync_config_override: optional config containing values to use
                                     for this sync only
        @type  sync_config_override: dict

        @return: The synchronization report.
        @rtype: L{pulp.server.plugins.model.SyncReport}

        @raise MissingResource: if repo_id does not refer to a valid repo
        @raise OperationFailed: if the given repo does not have an importer set
        """

        repo_coll = Repo.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        importer_instance, importer_config = RepoSyncManager._get_importer_instance_and_config(
            repo_id)

        if importer_instance is None:
            raise MissingResource(repo_id)

        importer_manager = manager_factory.repo_importer_manager()
        repo_importer = importer_manager.get_importer(repo_id)

        # Assemble the data needed for the sync
        conduit = RepoSyncConduit(repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_IMPORTER,
                                  repo_importer['id'])

        call_config = PluginCallConfiguration(importer_config, repo_importer['config'],
                                              sync_config_override)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.get_working_directory()

        # Fire events around the call
        fire_manager = manager_factory.event_fire_manager()
        fire_manager.fire_repo_sync_started(repo_id)
        sync_result = RepoSyncManager._do_sync(repo, importer_instance, transfer_repo, conduit,
                                               call_config)
        fire_manager.fire_repo_sync_finished(sync_result)

        if sync_result['result'] == RepoSyncResult.RESULT_FAILED:
            raise PulpExecutionException(_('Importer indicated a failed response'))

        repo_publish_manager = manager_factory.repo_publish_manager()
        auto_distributors = repo_publish_manager.auto_distributors(repo_id)

        spawned_tasks = []
        for distributor in auto_distributors:
            distributor_id = distributor['id']
            spawned_tasks.append(
                repo_publish_manager.queue_publish(repo_id, distributor_id).task_id)

        return TaskResult(sync_result, spawned_tasks=spawned_tasks)
Example #38
0
    def add_distributor(repo_id, distributor_type_id, repo_plugin_config,
                        auto_publish, distributor_id=None):
        """
        Adds an association from the given repository to a distributor. The
        association will be tracked through the distributor_id; each distributor
        on a given repository must have a unique ID. If this is not specified,
        one will be generated. If a distributor already exists on the repo for
        the given ID, the existing one will be removed and replaced with the
        newly configured one.

        :param repo_id:                         identifies the repo
        :type  repo_id:                         str
        :param distributor_type_id:             identifies the distributor; must correspond to a
                                                distributor loaded at server startup
        :type  distributor_type_id:             str
        :param repo_plugin_config:              configuration the repo will use with this
                                                distributor; may be None
        :type  repo_plugin_config:              dict
        :param auto_publish:                    if true, this distributor will be invoked at the end
                                                of every sync
        :type  auto_publish:                    bool
        :param distributor_id:                  unique ID to refer to this distributor for this repo
        :type  distributor_id:                  str
        :return:                                ID assigned to the distributor (only valid in
                                                conjunction with the repo)
        :raise MissingResource:                 if the given repo_id does not refer to a valid repo
        :raise InvalidValue:                    if the distributor ID is provided and unacceptable
        :raise InvalidDistributorConfiguration: if the distributor plugin does not accept the given
                                                configuration
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        if not plugin_api.is_valid_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for this distributor on this repo; will be
        # unique for all distributors on this repository but not globally
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items() if v is not None])
        else:
            clean_config = None

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(distributor_type_id,
                                                                         repo_id)
        config_conduit = RepoConfigConduit(distributor_type_id)

        result = distributor_instance.validate_config(transfer_repo, call_config, config_conduit)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result

        if not valid_config:
            raise PulpDataException(message)

        # Remove the old distributor if it exists
        try:
            RepoDistributorManager.remove_distributor(repo_id, distributor_id)
        except MissingResource:
            pass  # if it didn't exist, no problem

        # Let the distributor plugin initialize the repository
        try:
            distributor_instance.distributor_added(transfer_repo, call_config)
        except Exception:
            msg = _('Error initializing distributor [%(d)s] for repo [%(r)s]')
            msg = msg % {'d': distributor_type_id, 'r': repo_id}
            _logger.exception(msg)
            raise PulpExecutionException(), None, sys.exc_info()[2]

        # Database Update
        distributor = RepoDistributor(repo_id, distributor_id, distributor_type_id, clean_config,
                                      auto_publish)
        distributor_coll.save(distributor, safe=True)

        return distributor
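
is_distributor_id_valid is referenced here but not shown. A plausible sketch, offered only as an assumption about its behavior, restricts IDs to a conservative identifier pattern:

import re

_DISTRIBUTOR_ID_REGEX = re.compile(r'^[\-_A-Za-z0-9]+$')


def is_distributor_id_valid(distributor_id):
    # IDs are limited to letters, digits, hyphens, and underscores.
    return bool(_DISTRIBUTOR_ID_REGEX.match(distributor_id))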
Example #39
0
    def add_distributor(repo_id,
                        distributor_type_id,
                        repo_plugin_config,
                        auto_publish,
                        distributor_id=None):
        """
        Adds an association from the given repository to a distributor. The
        association will be tracked through the distributor_id; each distributor
        on a given repository must have a unique ID. If this is not specified,
        one will be generated. If a distributor already exists on the repo for
        the given ID, the existing one will be removed and replaced with the
        newly configured one.

        :param repo_id:                         identifies the repo
        :type  repo_id:                         str
        :param distributor_type_id:             identifies the distributor; must correspond to a
                                                distributor loaded at server startup
        :type  distributor_type_id:             str
        :param repo_plugin_config:              configuration the repo will use with this
                                                distributor; may be None
        :type  repo_plugin_config:              dict
        :param auto_publish:                    if true, this distributor will be invoked at the end
                                                of every sync
        :type  auto_publish:                    bool
        :param distributor_id:                  unique ID to refer to this distributor for this repo
        :type  distributor_id:                  str
        :return:                                ID assigned to the distributor (only valid in
                                                conjunction with the repo)
        :raise MissingResource:                 if the given repo_id does not refer to a valid repo
        :raise InvalidValue:                    if the distributor ID is provided and unacceptable
        :raise InvalidDistributorConfiguration: if the distributor plugin does not accept the given
                                                configuration
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        if not plugin_api.is_valid_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for this distributor on this repo; will be
        # unique for all distributors on this repository but not globally
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)

        config_conduit = RepoConfigConduit(distributor_type_id)

        result = distributor_instance.validate_config(transfer_repo,
                                                      call_config,
                                                      config_conduit)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result

        if not valid_config:
            raise PulpDataException(message)

        # Remove the old distributor if it exists
        try:
            RepoDistributorManager.remove_distributor(repo_id, distributor_id)
        except MissingResource:
            pass  # if it didn't exist, no problem

        # Let the distributor plugin initialize the repository
        try:
            distributor_instance.distributor_added(transfer_repo, call_config)
        except Exception:
            msg = _('Error initializing distributor [%(d)s] for repo [%(r)s]')
            msg = msg % {'d': distributor_type_id, 'r': repo_id}
            _logger.exception(msg)
            raise PulpExecutionException(), None, sys.exc_info()[2]

        # Database Update
        distributor = RepoDistributor(repo_id, distributor_id,
                                      distributor_type_id, clean_config,
                                      auto_publish)
        distributor_coll.save(distributor, safe=True)

        return distributor
Example #40
0
    def update_importer_config(repo_id, importer_config):
        """
        Attempts to update the saved configuration for the given repo's importer.
        The importer will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        :param repo_id:              identifies the repo
        :type  repo_id:              str
        :param importer_config:      new configuration values to use for this repo
        :type  importer_config:      dict
        :raise MissingResource:      if the given repo does not exist
        :raise MissingResource:      if the given repo does not have an importer
        :raise InvalidConfiguration: if the plugin indicates the given configuration is invalid
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_importer = importer_coll.find_one({'repo_id': repo_id})
        if repo_importer is None:
            raise MissingResource(repo_id)

        importer_type_id = repo_importer['importer_type_id']
        importer_instance, plugin_config = plugin_api.get_importer_by_id(importer_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_importer['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [k for k in importer_config if importer_config[k] is None]
        for key in unset_property_names:
            merged_config.pop(key, None)
            importer_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(importer_config)

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)

        try:
            result = importer_instance.validate_config(transfer_repo, call_config)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            msg = _('Exception received from importer [%(i)s] while validating config for repo '
                    '[%(r)s]')
            msg = msg % {'i': importer_type_id, 'r': repo_id}
            _logger.exception(msg)
            raise PulpDataException(e.args), None, sys.exc_info()[2]
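
A hypothetical call against this method, showing the delta semantics from the docstring: a new feed is set while proxy_host is unset by passing None. The repo ID and keys are illustrative, and the direct function invocation is assumed:

update_importer_config('zoo-repo', {
    'feed': 'https://example.com/zoo/',  # changed/added value
    'proxy_host': None,                  # None removes the key; the plugin default applies
})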
Example #41
0
    def update_distributor_config(self, repo_id, distributor_id, distributor_config):
        """
        Attempts to update the saved configuration for the given distributor.
        The distributor will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_id: identifies the distributor on the repo
        @type  distributor_id: str

        @param distributor_config: new configuration values to use
        @type  distributor_config: dict

        @return: the updated distributor
        @rtype:  dict

        @raise MissingResource: if the given repo or distributor doesn't exist
        @raise PulpDataException: if the plugin rejects the given changes
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id' : repo_id, 'id' : distributor_id})
        if repo_distributor is None:
            raise MissingResource(distributor_id)

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_distributor['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [k for k in distributor_config if distributor_config[k] is None]
        for key in unset_property_names:
            merged_config.pop(key, None)
            distributor_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(distributor_config)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(distributor_type_id, repo_id)

        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_distributor_type(distributor_type_id)

        transfer_related_repos = []
        for r in related_repos:

            # Don't include the repo being updated in this list
            if r['id'] == repo_id:
                continue

            all_configs = [d['config'] for d in r['distributors']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = distributor_instance.validate_config(transfer_repo, call_config, transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception('Exception raised from distributor [%s] while validating config for repo [%s]' % (distributor_type_id, repo_id))
            raise PulpDataException(e.args), None, sys.exc_info()[2]
Example #42
0
    def update_importer_config(repo_id, importer_config):
        """
        Attempts to update the saved configuration for the given repo's importer.
        The importer will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        :param repo_id:              identifies the repo
        :type  repo_id:              str
        :param importer_config:      new configuration values to use for this repo
        :type  importer_config:      dict
        :raise MissingResource:      if the given repo does not exist
        :raise MissingResource:      if the given repo does not have an importer
        :raise InvalidConfiguration: if the plugin indicates the given configuration is invalid
        """

        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        repo_importer = importer_coll.find_one({'repo_id': repo_id})
        if repo_importer is None:
            raise MissingResource(repo_id)

        importer_type_id = repo_importer['importer_type_id']
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_importer['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [
            k for k in importer_config if importer_config[k] is None
        ]
        for key in unset_property_names:
            merged_config.pop(key, None)
            importer_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(importer_config)

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)

        try:
            result = importer_instance.validate_config(transfer_repo,
                                                       call_config)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            msg = _(
                'Exception received from importer [%(i)s] while validating config for repo '
                '[%(r)s]')
            msg = msg % {'i': importer_type_id, 'r': repo_id}
            _logger.exception(msg)
            raise PulpDataException(e.args), None, sys.exc_info()[2]
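
The comments above describe the delta semantics for config updates: a key set to None is unset (falling back to the plugin default), and every other key overrides or adds to the saved value. A standalone sketch of that convention, using a hypothetical helper and made-up config keys:

def apply_config_delta(saved_config, delta):
    merged = dict(saved_config)        # work on a copy of the stored config
    for key, value in delta.items():
        if value is None:
            merged.pop(key, None)      # None means "unset"; the plugin default applies
        else:
            merged[key] = value        # changed/added values win
    return merged

saved = {'feed': 'http://example.com/repo', 'max_downloads': 5}
delta = {'max_downloads': None, 'ssl_validation': False}
print(apply_config_delta(saved, delta))
# {'feed': 'http://example.com/repo', 'ssl_validation': False}
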
Example #43
    def add_distributor(self, repo_id, distributor_type_id, repo_plugin_config,
                        auto_publish, distributor_id=None):
        """
        Adds an association from the given repository to a distributor. The
        association will be tracked through the distributor_id; each distributor
        on a given repository must have a unique ID. If this is not specified,
        one will be generated. If a distributor already exists on the repo for
        the given ID, the existing one will be removed and replaced with the
        newly configured one.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_type_id: identifies the distributor; must correspond
                                    to a distributor loaded at server startup
        @type  distributor_type_id: str

        @param repo_plugin_config: configuration the repo will use with this distributor; may be None
        @type  repo_plugin_config: dict

        @param auto_publish: if true, this distributor will be invoked at
                             the end of every sync
        @type  auto_publish: bool

        @param distributor_id: unique ID to refer to this distributor for this repo
        @type  distributor_id: str

        @return: ID assigned to the distributor (only valid in conjunction with the repo)

        @raise MissingResource: if the given repo_id does not refer to a valid repo
        @raise InvalidValue: if the distributor ID is provided and unacceptable
        @raise InvalidDistributorConfiguration: if the distributor plugin does not
               accept the given configuration
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id' : repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        if not plugin_api.is_valid_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for this distributor on this repo; will be
        # unique for all distributors on this repository but not globally
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items() if v is not None])
        else:
            clean_config = None

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(distributor_type_id, repo_id)

        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_distributor_type(distributor_type_id)

        transfer_related_repos = []
        for r in related_repos:
            all_configs = [d['config'] for d in r['distributors']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = distributor_instance.validate_config(transfer_repo, call_config, transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception('Exception received from distributor [%s] while validating config' % distributor_type_id)
            raise PulpDataException(e.args), None, sys.exc_info()[2]
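
When a distributor is first added (as opposed to updated), the None convention is applied by simply dropping any key explicitly set to None before the config is handed to the plugin. A minimal sketch, assuming only a plain dict as input:

def clean_plugin_config(repo_plugin_config):
    if repo_plugin_config is None:
        return None
    # Drop keys explicitly set to None so the plugin falls back to its defaults.
    return dict((k, v) for k, v in repo_plugin_config.items() if v is not None)

print(clean_plugin_config({'relative_url': 'demo', 'auth_cert': None}))
# {'relative_url': 'demo'}
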
Example #44
    def update_distributor_config(repo_id, distributor_id, distributor_config, auto_publish=None):
        """
        Attempts to update the saved configuration for the given distributor.
        The distributor will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        :param repo_id: identifies the repo
        :type  repo_id: str

        :param distributor_id: identifies the distributor on the repo
        :type  distributor_id: str

        :param distributor_config: new configuration values to use
        :type  distributor_config: dict

        :param auto_publish: if true, this distributor is used automatically during
                             a sync operation
        :type  auto_publish: bool

        :return: the updated distributor
        :rtype:  dict

        :raise MissingResource: if the given repo or distributor doesn't exist
        :raise PulpDataException: if the plugin rejects the given changes
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        repo_distributor = distributor_coll.find_one({'repo_id': repo_id, 'id': distributor_id})
        if repo_distributor is None:
            raise MissingResource(distributor=distributor_id)

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(distributor_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_distributor['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [k for k in distributor_config if distributor_config[k] is None]
        for key in unset_property_names:
            merged_config.pop(key, None)
            distributor_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(distributor_config)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(distributor_type_id,
                                                                         repo_id)
        config_conduit = RepoConfigConduit(distributor_type_id)

        try:
            result = distributor_instance.validate_config(transfer_repo, call_config,
                                                          config_conduit)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            msg = _('Exception raised from distributor [%(d)s] while validating config for repo '
                    '[%(r)s]')
            msg = msg % {'d': distributor_type_id, 'r': repo_id}
            _logger.exception(msg)
            raise PulpDataException(e.args), None, sys.exc_info()[2]
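
PluginCallConfiguration in these examples layers the repo-scoped config over the plugin-wide config (and, where present, an override on top of both). The class below is a loose, illustrative stand-in for that lookup order, not Pulp's actual implementation:

class LayeredConfig(object):
    """Illustrative stand-in: earlier layers are more specific and win on lookup."""

    def __init__(self, plugin_config, repo_config, override_config=None):
        self._layers = [override_config or {}, repo_config or {}, plugin_config or {}]

    def get(self, key, default=None):
        for layer in self._layers:
            if key in layer:
                return layer[key]
        return default

cfg = LayeredConfig({'timeout': 30}, {'timeout': 60, 'relative_url': 'demo'})
print(cfg.get('timeout'))        # 60 -- the repo-level value overrides the plugin default
print(cfg.get('relative_url'))   # 'demo'
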
Example #45
    def set_importer(repo_id, importer_type_id, repo_plugin_config):
        """
        Configures an importer to be used for the given repository.

        Keep in mind this method is written assuming a single importer per repo.
        The domain model technically supports multiple importers, but this
        call is what enforces the single-importer behavior.

        :param repo_id:                      identifies the repo
        :type  repo_id:                      str
        :param importer_type_id:             identifies the type of importer being added;
                                             must correspond to an importer loaded at server startup
        :type  importer_type_id:             str
        :param repo_plugin_config:           configuration values for the importer; may be None
        :type  repo_plugin_config:           dict
        :raise MissingResource:              if repo_id does not represent a valid repo
        :raise InvalidImporterConfiguration: if the importer cannot be initialized for the given
                                             repo
        """
        RepoImporterManager.validate_importer_config(repo_id, importer_type_id,
                                                     repo_plugin_config)
        repo_coll = Repo.get_collection()
        importer_coll = RepoImporter.get_collection()

        repo = repo_coll.find_one({'id': repo_id})
        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)

        # Remove old importer if one exists
        try:
            RepoImporterManager.remove_importer(repo_id)
        except MissingResource:
            pass  # it didn't exist, so no harm done

        # Let the importer plugin initialize the repository
        try:
            importer_instance.importer_added(transfer_repo, call_config)
        except Exception:
            _logger.exception(
                'Error initializing importer [%s] for repo [%s]' %
                (importer_type_id, repo_id))
            raise PulpExecutionException(), None, sys.exc_info()[2]

        # Database Update
        importer_id = importer_type_id  # by convention, the importer type ID doubles as its ID on this repo

        importer = RepoImporter(repo_id, importer_id, importer_type_id,
                                clean_config)
        importer_coll.save(importer, safe=True)

        return importer
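
set_importer follows a replace-then-initialize flow: remove any existing importer (treating "nothing to remove" as success), let the plugin initialize against the repo, and only then persist the new association. A condensed, hypothetical sketch of that control flow with a stand-in exception class:

class MissingResource(Exception):
    """Stand-in for pulp.server.exceptions.MissingResource."""

def replace_importer(remove_existing, initialize_plugin, save_association):
    try:
        remove_existing()            # drop the old importer association, if any
    except MissingResource:
        pass                         # nothing to remove; not an error
    initialize_plugin()              # may raise; nothing has been saved yet
    return save_association()        # persist only after initialization succeeds
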
Example #46
    def import_uploaded_unit(self, repo_id, unit_type_id, unit_key,
                             unit_metadata, upload_id):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        @param repo_id: identifies the repository into which the unit is uploaded
        @type  repo_id: str

        @param unit_type_id: type of unit being uploaded
        @type  unit_type_id: str

        @param unit_key: unique identifier for the unit (user-specified)
        @type  unit_key: dict

        @param unit_metadata: any user-specified information about the unit
        @type  unit_metadata: dict

        @param upload_id: identifies the upload being imported
        @type  upload_id: str
        """

        # If it doesn't raise an exception, it's good to go
        self.is_valid_upload(repo_id, unit_type_id)

        repo_query_manager = manager_factory.repo_query_manager()
        importer_manager = manager_factory.repo_importer_manager()

        repo = repo_query_manager.find_by_id(repo_id)
        repo_importer = importer_manager.get_importer(repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(
            repo_id, repo_importer['id'], RepoContentUnit.OWNER_TYPE_USER,
            manager_factory.principal_manager().get_principal()['login'])

        call_config = PluginCallConfiguration(plugin_config,
                                              repo_importer['config'], None)
        transfer_repo = repo_common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = repo_common_utils.importer_working_dir(
            repo_importer['importer_type_id'], repo_id, mkdir=True)

        file_path = self._upload_file_path(upload_id)

        # Invoke the importer
        try:
            importer_instance.upload_unit(transfer_repo, unit_type_id,
                                          unit_key, unit_metadata, file_path,
                                          conduit, call_config)
        except PulpException:
            _LOG.exception(
                'Error from the importer while importing uploaded unit to repository [%s]'
                % repo_id)
            raise
        except Exception, e:
            _LOG.exception(
                'Error from the importer while importing uploaded unit to repository [%s]'
                % repo_id)
            raise PulpExecutionException(e), None, sys.exc_info()[2]
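
The error handling in import_uploaded_unit distinguishes known Pulp errors (re-raised untouched) from unexpected ones (wrapped, with the original traceback preserved via the Python 2 three-argument raise). A sketch of that pattern with stand-in exception classes; Python 3 code would use "raise ... from e" instead:

import sys

class PulpException(Exception):
    """Stand-in for pulp.server.exceptions.PulpException."""

class PulpExecutionException(PulpException):
    """Stand-in for pulp.server.exceptions.PulpExecutionException."""

def invoke_importer(upload_call):
    try:
        return upload_call()
    except PulpException:
        raise                                        # known errors pass through unchanged
    except Exception, e:
        # Wrap anything unexpected, keeping the original traceback (Python 2 idiom).
        raise PulpExecutionException(e), None, sys.exc_info()[2]
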