def import_content(self, path, location=None):
    """
    Copy a content file into platform storage for this unit.

    The optional *location* selects a path inside the unit storage
    directory where the file will be placed. For example::

        import_content('/tmp/file')           # stored at: _storage_path
        import_content('/tmp/file', 'a/b/c')  # stored at: _storage_path/a/b/c

    :param path: The absolute path to the file to be imported.
    :type path: str
    :param location: The (optional) location within the unit storage path
                     where the content is to be stored.
    :type location: str

    :raises PulpCodedException: PLP0036 if the unit has not been saved.
    :raises PulpCodedException: PLP0037 if *path* is not an existing file.
    """
    # A unit only gets _last_updated once it has been saved; refuse to
    # import content for an unsaved unit.
    unit_is_saved = bool(self._last_updated)
    if not unit_is_saved:
        raise exceptions.PulpCodedException(error_code=error_codes.PLP0036)
    if not os.path.isfile(path):
        raise exceptions.PulpCodedException(error_code=error_codes.PLP0037, path=path)
    with FileStorage() as storage:
        storage.put(self, path, location)
def test_with_child_exceptions(self):
    """
    Test that initializing with a list of child exceptions records both the
    default error code (PLP1000) and the children themselves.
    """
    e = exceptions.PulpCodedValidationException(
        [exceptions.PulpCodedException(), exceptions.PulpCodedException()])
    # assertEquals is a deprecated alias of assertEqual; use the canonical name.
    self.assertEqual(e.error_code, error_codes.PLP1000)
    self.assertEqual(len(e.child_exceptions), 2)
def get_image_ids(self):
    """
    Retrieve the IDs of all images in the upstream repository.

    This is conceptually a little ambiguous: a repo may contain images that
    are neither tagged nor in the ancestry of a tagged image, and those are
    still included here.

    :return: list of image IDs in the repo
    :rtype: list

    :raises pulp_exceptions.PulpCodedException: if fetching the IDs fails
    """
    images_path = self.IMAGES_PATH % self.name
    _logger.debug('retrieving image ids from remote registry')
    try:
        response = self._get_single_path(images_path)
    except IOError as error:
        # Keep the full traceback in the debug log before translating the
        # failure into a coded exception for the caller.
        _logger.debug(traceback.format_exc())
        raise pulp_exceptions.PulpCodedException(
            error_code=error_codes.DKR1007, repo=self.name,
            registry=self.registry_url, reason=str(error))
    image_ids = []
    for entry in response:
        image_ids.append(entry['id'])
    return image_ids
def test_field_validation_correct(self):
    """
    Ensure no error is raised when the fields required by the error code are
    present in the error data.
    """
    # Should not raise: 'message' satisfies PLP0000; extra fields are allowed.
    exceptions.PulpCodedException(
        error_codes.PLP0000, message='foo message', extra_field='bar')
def _do_publish(repo_obj, dist_id, dist_inst, transfer_repo, conduit, call_config):
    """
    Publish the repository using the given distributor.

    :param repo_obj: repository object
    :type repo_obj: pulp.server.db.model.Repository
    :param dist_id: identifies the distributor
    :type dist_id: str
    :param dist_inst: instance of the distributor
    :type dist_inst: dict
    :param transfer_repo: dict representation of a repo for the plugins to use
    :type transfer_repo: pulp.plugins.model.Repository
    :param conduit: allows the plugin to interact with core pulp
    :type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param call_config: allows the plugin to retrieve values
    :type call_config: pulp.plugins.config.PluginCallConfiguration

    :return: publish result containing information about the publish
    :rtype: pulp.server.db.model.repository.RepoPublishResult

    :raises pulp_exceptions.PulpCodedException: if the publish report's
            success flag is falsey
    """
    publish_result_coll = RepoPublishResult.get_collection()
    # Captured before the plugin runs so the history entry records the full span.
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(
            dist_inst.publish_repo, dist_inst.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
        # A report with a falsey success_flag is treated as a failed publish and
        # converted into a coded exception (handled by the except block below).
        if publish_report is not None and hasattr(publish_report, 'success_flag') \
                and not publish_report.success_flag:
            _logger.info(publish_report.summary)
            raise pulp_exceptions.PulpCodedException(
                error_code=error_codes.PLP0034,
                repository_id=repo_obj.repo_id,
                distributor_id=dist_id,
                summary=publish_report.summary
            )
    except Exception, e:
        exception_timestamp = _now_timestamp()
        # Reload the distributor in case the scratchpad is set by the plugin
        dist = model.Distributor.objects.get_or_404(
            repo_id=repo_obj.repo_id, distributor_id=dist_id)
        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_obj.repo_id, dist.distributor_id, dist.distributor_type_id,
            publish_start_timestamp, exception_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)
        _logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]'
              % {'r': repo_obj.repo_id}))
        # Re-raise so the caller sees the original failure after it is recorded.
        raise
def ensure_all_units_downloaded(repo_id):
    """
    Verify against the database that every unit in the repo has been
    downloaded, raising an exception if any has not.

    :param repo_id: repository id
    :type repo_id: basestring

    :raises exceptions.PulpCodedException: if any unit in the repo is
        un-downloaded
    """
    fully_downloaded = repo_controller.has_all_units_downloaded(repo_id)
    if not fully_downloaded:
        raise exceptions.PulpCodedException(error_code=error_codes.PLP0045)
def set_content(self, source_location):
    """
    Record the source of the unit's content so it can later be stored at the
    relative path within the plugin content directory.

    :param source_location: The absolute path to the content in the plugin
                            working directory.
    :type source_location: str

    :raises PulpCodedException: PLP0036 if the source_location doesn't exist.
    """
    if os.path.exists(source_location):
        self._source_location = source_location
    else:
        raise exceptions.PulpCodedException(
            error_code=error_codes.PLP0036, source_location=source_location)
def ensure_all_units_downloaded(repo_group):
    """
    Verify against the database that every unit in each repo of the group has
    been downloaded, raising an exception if any has not.

    :param repo_group: RepositoryGroup instance that should be checked
    :type repo_group: pulp.plugins.model.RepositoryGroup

    :raises exceptions.PulpCodedException: if any unit in the group is
        un-downloaded
    """
    repo_ids = repo_group.repo_ids or []
    bad_repo_ids = [repo_id for repo_id in repo_ids
                    if not repo_controller.has_all_units_downloaded(repo_id)]
    if bad_repo_ids:
        raise exceptions.PulpCodedException(error_code=error_codes.PLP0046,
                                            repos=', '.join(bad_repo_ids))
def post_init_signal(cls, sender, document):
    """
    Signal handler fired once a unit document has been initialized.

    Validates that the concrete unit class defines the required
    ``unit_key_fields`` attribute.

    :param sender: sender class
    :type sender: object
    :param document: Document that sent the signal
    :type document: ContentUnit

    :raises: PLP0035 if the unit_key_fields attribute has not been defined
    """
    if hasattr(document, 'unit_key_fields'):
        return
    raise exceptions.PulpCodedException(error_codes.PLP0035,
                                        class_name=type(document).__name__)
def test_to_dict_nested_pulp_exception(self):
    """
    Serializing an exception carrying a coded child exception should surface
    the child under 'sub_errors' with its code, message, empty data, and no
    sub-errors of its own.
    """
    parent = exceptions.PulpException("foo_msg")
    parent.error_data = {"foo": "bar"}
    parent.add_child_exception(
        exceptions.PulpCodedException(error_codes.PLP0001))

    serialized = parent.to_dict()

    expected_child = {
        'code': error_codes.PLP0001.code,
        'description': error_codes.PLP0001.message,
        'data': {},
        'sub_errors': [],
    }
    compare_dict(serialized['sub_errors'][0], expected_child)
def get_tags(self):
    """
    Fetch the tags available in the remote repository.

    :return: A list of basestrings of the available tags in the repository.
    :rtype: list
    """
    tags_path = self.TAGS_PATH.format(name=self.name)
    _logger.debug('retrieving tags from remote registry')
    try:
        _headers, body = self._get_path(tags_path)
    except IOError as error:
        raise pulp_exceptions.PulpCodedException(
            error_code=error_codes.DKR1007, repo=self.name,
            registry=self.registry_url, reason=str(error))
    # A null/empty 'tags' value is normalized to an empty list.
    return json.loads(body)['tags'] or []
def attach_signals(cls):
    """
    Hook up the mongoengine signals for this class.

    Provided as a class method so it can be called on subclasses and all the
    correct signals will be applied to each of them.
    """
    signals.pre_save.connect(cls.pre_save_signal, sender=cls)

    # A unit class without any unit key fields is unusable; fail fast.
    if len(cls.unit_key_fields) == 0:
        raise exceptions.PulpCodedException(error_codes.PLP0035,
                                            class_name=cls.__name__)

    # Build the named tuple now so the cost is paid during server startup.
    cls.NAMED_TUPLE = namedtuple(cls.unit_type_id.default, cls.unit_key_fields)
def validate_model_definition(cls):
    """
    Validate that all subclasses of ContentType define required fields correctly.

    Ensure a field named `_content_type_id` is defined and raise a ValidationError
    if not. Each subclass of ContentUnit must have the content type id stored in
    the `_content_type_id` field as a StringField. The field must be marked as
    required and have a default set. For example::

        _content_type_id = StringField(required=True, default='rpm')

    Ensure a field named `unit_key_fields` is defined and raise a ValidationError
    if not. Each subclass of ContentUnit must have the content type id stored in
    the `unit_key_fields` field as a tuple and it must not be empty::

        unit_key_fields = ('author', 'name', 'version')

    :raises: PLP0035 if a field or attribute is incorrectly defined
    """
    def fail(msg, field_name):
        # Every mis-defined field shares the same log-then-raise pattern;
        # centralize it so each check below is a single readable line.
        _logger.error(msg, {'class_name': cls.__name__})
        raise exceptions.PulpCodedException(error_codes.PLP0035,
                                            class_name=cls.__name__,
                                            field_name=field_name)

    # Validate the 'unit_key_fields' attribute
    if not hasattr(cls, 'unit_key_fields'):
        fail(_("The class %(class_name)s must define a 'unit_key_fields' attribute"),
             'unit_key_fields')
    if not isinstance(cls.unit_key_fields, tuple):
        fail(_("The class %(class_name)s must define 'unit_key_fields' to be a tuple"),
             'unit_key_fields')
    if len(cls.unit_key_fields) == 0:
        fail(_("The field 'unit_key_fields' on class %(class_name)s must have length > 0"),
             'unit_key_fields')

    # Validate the '_content_type_id' field
    if not hasattr(cls, '_content_type_id'):
        fail(_("The class %(class_name)s must define a '_content_type_id' attribute"),
             '_content_type_id')
    if not isinstance(cls._content_type_id, StringField):
        fail(_("The class %(class_name)s must define '_content_type_id' to be a StringField"),
             '_content_type_id')
    if cls._content_type_id.default is None:
        fail(_("The class %(class_name)s must define a default value "
               "for the '_content_type_id' field"),
             '_content_type_id')
    if cls._content_type_id.required is False:
        fail(_("The class %(class_name)s must require the '_content_type_id' field"),
             '_content_type_id')
def associate_from_repo(source_repo_id, dest_repo_id, criteria=None, import_config_override=None): """ Creates associations in a repository based on the contents of a source repository. Units from the source repository can be filtered by specifying a criteria object. The destination repository must have an importer that can support the types of units being associated. This is done by analyzing the unit list and the importer metadata and takes place before the destination repository is called. Pulp does not actually perform the associations as part of this call. The unit list is determined and passed to the destination repository's importer. It is the job of the importer to make the associate calls back into Pulp where applicable. If criteria is None, the effect of this call is to copy the source repository's associations into the destination repository. :param source_repo_id: identifies the source repository :type source_repo_id: str :param dest_repo_id: identifies the destination repository :type dest_repo_id: str :param criteria: optional; if specified, will filter the units retrieved from the source repository :type criteria: pulp.server.db.model.criteria.UnitAssociationCriteria :param import_config_override: optional config containing values to use for this import only :type import_config_override: dict :return: dict with key 'units_successful' whose value is a list of unit keys that were copied. units that were associated by this operation :rtype: dict :raise MissingResource: if either of the specified repositories don't exist """ importer_manager = manager_factory.repo_importer_manager() source_repo = model.Repository.objects.get_repo_or_missing_resource( source_repo_id) dest_repo = model.Repository.objects.get_repo_or_missing_resource( dest_repo_id) # This will raise MissingResource if there isn't one, which is the # behavior we want this method to exhibit, so just let it bubble up. 
dest_repo_importer = importer_manager.get_importer(dest_repo_id) source_repo_importer = importer_manager.get_importer(source_repo_id) # The docs are incorrect on the list_importer_types call; it actually # returns a dict with the types under key "types" for some reason. supported_type_ids = set( plugin_api.list_importer_types( dest_repo_importer['importer_type_id'])['types']) # Get the unit types from the repo source repo source_repo_unit_types = set(source_repo.content_unit_counts.keys()) # Now we can make sure the destination repository's importer is capable # of importing either the selected units or all of the units if not source_repo_unit_types.issubset(supported_type_ids): raise exceptions.PulpCodedException( error_code=error_codes.PLP0000, message= 'The the target importer does not support the types from the source' ) transfer_units = None # If criteria is specified, retrieve the list of units now if criteria is not None: # if all source types have been converted to mongo - search via new style if source_repo_unit_types.issubset( set(plugin_api.list_unit_models())): association_q = mongoengine.Q( __raw__=criteria.association_spec) unit_q = mongoengine.Q(__raw__=criteria.unit_spec) transfer_units = repo_controller.find_repo_content_units( repository=source_repo, repo_content_unit_q=association_q, units_q=unit_q, yield_content_unit=True) else: # else, search via old style associate_us = load_associated_units(source_repo_id, criteria) # If units were supposed to be filtered but none matched, we're done if len(associate_us) == 0: # Return an empty list to indicate nothing was copied return {'units_successful': []} # Convert all of the units into the plugin standard representation if # a filter was specified transfer_units = None if associate_us is not None: transfer_units = create_transfer_units(associate_us) # Convert the two repos into the plugin API model transfer_dest_repo = dest_repo.to_transfer_repo() transfer_source_repo = source_repo.to_transfer_repo() # 
Invoke the importer importer_instance, plugin_config = plugin_api.get_importer_by_id( dest_repo_importer['importer_type_id']) call_config = PluginCallConfiguration(plugin_config, dest_repo_importer['config'], import_config_override) conduit = ImportUnitConduit(source_repo_id, dest_repo_id, source_repo_importer['id'], dest_repo_importer['id']) try: copied_units = importer_instance.import_units(transfer_source_repo, transfer_dest_repo, conduit, call_config, units=transfer_units) unit_ids = [u.to_id_dict() for u in copied_units] return {'units_successful': unit_ids} except Exception: msg = _( 'Exception from importer [%(i)s] while importing units into repository [%(r)s]' ) msg_dict = { 'i': dest_repo_importer['importer_type_id'], 'r': dest_repo_id } logger.exception(msg % msg_dict) raise exceptions.PulpExecutionException(), None, sys.exc_info()[2]
def associate_from_repo(source_repo_id, dest_repo_id, criteria, import_config_override=None):
    """
    Creates associations in a repository based on the contents of a source
    repository. Units from the source repository can be filtered by specifying
    a criteria object.

    The destination repository must have an importer that can support the
    types of units being associated. This is done by analyzing the unit list
    and the importer metadata and takes place before the destination
    repository is called.

    Pulp does not actually perform the associations as part of this call. The
    unit list is determined and passed to the destination repository's
    importer. It is the job of the importer to make the associate calls back
    into Pulp where applicable.

    If criteria is None, the effect of this call is to copy the source
    repository's associations into the destination repository.

    :param source_repo_id: identifies the source repository
    :type source_repo_id: str
    :param dest_repo_id: identifies the destination repository
    :type dest_repo_id: str
    :param criteria: dict form of a criteria; if specified, will filter the
                     units retrieved from the source repository
    :type criteria: dict
    :param import_config_override: optional config containing values to use
                                   for this import only
    :type import_config_override: dict

    :return: dict with key 'units_successful' whose value is a list of unit
             keys that were copied (units that were associated by this
             operation)
    :rtype: dict

    :raise MissingResource: if either of the specified repositories don't exist
    """
    criteria = UnitAssociationCriteria.from_dict(criteria)
    source_repo = model.Repository.objects.get_repo_or_missing_resource(source_repo_id)
    dest_repo = model.Repository.objects.get_repo_or_missing_resource(dest_repo_id)

    dest_repo_importer = model.Importer.objects.get_or_404(repo_id=dest_repo_id)
    source_repo_importer = model.Importer.objects.get_or_404(repo_id=source_repo_id)

    # The docs are incorrect on the list_importer_types call; it actually
    # returns a dict with the types under key "types" for some reason.
    supported_type_ids = set(plugin_api.list_importer_types(
        dest_repo_importer.importer_type_id)['types'])

    # Get the unit types from the repo source repo
    source_repo_unit_types = set(source_repo.content_unit_counts.keys())

    # Now we can make sure the destination repository's importer is capable
    # of importing either the selected units or all of the units
    if not source_repo_unit_types.issubset(supported_type_ids):
        raise exceptions.PulpCodedException(error_code=error_codes.PLP0044)

    transfer_units = None
    # if all source types have been converted to mongo - search via new style
    if source_repo_unit_types.issubset(set(plugin_api.list_unit_models())):
        transfer_units = RepoUnitAssociationManager._units_from_criteria(source_repo, criteria)
    else:
        # else, search via old style
        associate_us = load_associated_units(source_repo_id, criteria)
        # If units were supposed to be filtered but none matched, we're done
        if len(associate_us) == 0:
            # Return an empty list to indicate nothing was copied
            return {'units_successful': []}
        # Convert all of the units into the plugin standard representation if
        # a filter was specified
        transfer_units = None
        if associate_us is not None:
            transfer_units = create_transfer_units(associate_us)

    # Convert the two repos into the plugin API model
    transfer_dest_repo = dest_repo.to_transfer_repo()
    transfer_source_repo = source_repo.to_transfer_repo()

    # Invoke the importer
    importer_instance, plugin_config = plugin_api.get_importer_by_id(
        dest_repo_importer.importer_type_id)

    call_config = PluginCallConfiguration(plugin_config, dest_repo_importer.config,
                                          import_config_override)
    conduit = ImportUnitConduit(
        source_repo_id, dest_repo_id, source_repo_importer.importer_type_id,
        dest_repo_importer.importer_type_id)

    try:
        copied_units = importer_instance.import_units(
            transfer_source_repo, transfer_dest_repo, conduit, call_config,
            units=transfer_units)
        if isinstance(copied_units, tuple):
            suc_units_ids = [u.to_id_dict() for u in copied_units[0] if u is not None]
            unsuc_units_ids = [u.to_id_dict() for u in copied_units[1]]
            repo_controller.rebuild_content_unit_counts(dest_repo)
            return {'units_successful': suc_units_ids,
                    'units_failed_signature_filter': unsuc_units_ids}
        unit_ids = [u.to_id_dict() for u in copied_units if u is not None]
        repo_controller.rebuild_content_unit_counts(dest_repo)
        return {'units_successful': unit_ids}
    except Exception:
        msg = _('Exception from importer [%(i)s] while importing units into repository [%(r)s]')
        msg_dict = {'i': dest_repo_importer.importer_type_id, 'r': dest_repo_id}
        logger.exception(msg % msg_dict)
        # BUG FIX: this previously did ``raise (e, None, sys.exc_info()[2])``,
        # which raises a *tuple* -- on Python 2 that re-raises ``e`` but discards
        # the traceback, and on Python 3 it is a TypeError. A bare ``raise``
        # re-raises the active exception with its original traceback.
        raise
additional_tasks = [] errors = [] for bind in consumer_bind_manager.find_by_repo(repo_id): try: report = consumer_controller.unbind(bind['consumer_id'], bind['repo_id'], bind['distributor_id'], options) if report: additional_tasks.extend(report.spawned_tasks) except Exception, e: errors.append(e) error = None if len(errors) > 0: error = pulp_exceptions.PulpCodedException(error_codes.PLP0007, repo_id=repo_id) error.child_exceptions = errors return TaskResult(error=error, spawned_tasks=additional_tasks) def update_repo_and_plugins(repo, repo_delta, importer_config, distributor_configs): """ Update a reposiory and its related collections. All details do not need to be specified; if a piece is omitted it's configuration is not touched, nor is it removed from the repository. The same holds true for the distributor_configs dict, not every distributor must be represented. This call will attempt to update the repository object, then the importer, then the
additional_tasks = [] options = {} bind_manager = managers.consumer_bind_manager() for bind in bind_manager.find_by_distributor(repo_id, dist_id): try: report = bind_manager.unbind(bind['consumer_id'], bind['repo_id'], bind['distributor_id'], options) if report: additional_tasks.extend(report.spawned_tasks) except Exception, e: unbind_errors.append(e) bind_error = None if unbind_errors: bind_error = exceptions.PulpCodedException(PLP0003, repo_id=repo_id, distributor_id=dist_id) bind_error.child_exceptions = unbind_errors return TaskResult(error=bind_error, spawned_tasks=additional_tasks) def queue_update(distributor, config, delta): """ Dispatch a task to update a distributor. :param distributor: distributor to be updated :type distributor: pulp.server.db.model.Distributor :param config: A configuration dictionary for a distributor instance. The contents of this dict depends on the type of distributor. Values of None will remove they key from the config. Keys ommited from this dictionary will remain unchanged. :type config: dict