def run(self):
    """
    Steps through the entire workflow of a repo sync.

    :return: A SyncReport detailing how the sync went
    :rtype:  pulp.plugins.model.SyncReport
    """
    # Empty list could be returned in case _parse_as_mirrorlist()
    # was not able to find any valid url
    if not self.sync_feed:
        raise PulpCodedException(error_code=error_codes.RPM1004, reason='Not found')
    url_count = 0
    for url in self.sync_feed:
        # Verify that we have a feed url.
        # If there is no feed url, then we have nothing to sync.
        if url is None:
            raise PulpCodedException(error_code=error_codes.RPM1005)
        # Using this tmp dir ensures that cleanup leaves nothing behind,
        # since we delete below.
        self.tmp_dir = tempfile.mkdtemp(dir=self.working_dir)
        url_count += 1
        try:
            with self.update_state(self.progress_report['metadata']):
                metadata_files = self.check_metadata(url)
                self.fix_metadata(metadata_files)
                metadata_files = self.get_metadata(metadata_files)

                # Save the default checksum from the metadata
                self.save_default_metadata_checksum_on_repo(metadata_files)

            with self.update_state(self.content_report) as skip:
                if not (skip or self.skip_repomd_steps):
                    self.update_content(metadata_files, url)
        except PulpCodedException as e:
            # Check if the caught exception indicates that the mirror is bad.
            # Try the next mirror in the list without raising the exception.
            # In case it was the last mirror in the list, raise the exception.
            bad_mirror_exceptions = [error_codes.RPM1004, error_codes.RPM1006]
            if (e.error_code in bad_mirror_exceptions) and \
                    url_count != len(self.sync_feed):
                continue
            else:
                self._set_failed_state(e)
                raise
        except Exception as e:
            # In case other exceptions were caught that are not related to
            # the state of the mirror, stop immediately and do not iterate
            # through the rest of the mirrors.
            _logger.exception(e)
            self._set_failed_state(e)
            report = self.conduit.build_failure_report(
                self._progress_summary, self.progress_report)
            return report
def _concatenate_units(self, existing_unit, new_unit):
    """
    Perform unit concatenation.

    :param existing_unit: The unit that is already in the DB
    :type  existing_unit: pulp.plugins.model.Unit
    :param new_unit: The unit we are combining with the existing unit
    :type  new_unit: pulp.server.db.model.ContentUnit
    """
    if existing_unit._content_type_id != new_unit._content_type_id:
        raise PulpCodedException(message="Cannot concatenate two units of different types. "
                                         "Tried to concatenate %s with %s" %
                                         (existing_unit.type_id, new_unit.type_id))

    if existing_unit.unit_key != new_unit.unit_key:
        raise PulpCodedException(message="Concatenated units must have the same unit key. "
                                         "Tried to concatenate %s with %s" %
                                         (existing_unit.unit_key, new_unit.unit_key))

    if isinstance(existing_unit, models.Errata):
        existing_unit.merge_errata(new_unit)
    else:
        raise PulpCodedException(message="Concatenation of unit type %s is not supported" %
                                         existing_unit.type_id)

    # Return the unit now that we've possibly modified it.
    return existing_unit
def filter_signature(unit, config):
    """
    Filter package based on GPG signature and allowed GPG key IDs.

    :param unit: model instance of the package
    :type  unit: pulp_rpm.plugins.db.models.RPM/DRPM/SRPM
    :param config: configuration instance passed to the importer
    :type  config: pulp.plugins.config.PluginCallConfiguration

    :raise: PulpCodedException if the package signing key ID does not exist or is not allowed
    """
    signing_key = unit.signing_key
    require_signature = config.get(constants.CONFIG_REQUIRE_SIGNATURE, False)
    allowed_keys = config.get(constants.CONFIG_ALLOWED_KEYS, [])
    if require_signature and not signing_key:
        raise PulpCodedException(error_code=error_codes.RPM1013, package=unit.filename)
    if allowed_keys:
        allowed_keys = [key.lower() for key in allowed_keys]
        if signing_key and signing_key not in allowed_keys:
            raise PulpCodedException(error_code=error_codes.RPM1014, key=signing_key,
                                     package=unit.filename, allowed=allowed_keys)
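# A minimal usage sketch for filter_signature() above. The stub objects below are
# assumptions for illustration only: a real call receives a PluginCallConfiguration
# and an RPM/DRPM/SRPM model instance, and the config key strings are assumed to be
# what constants.CONFIG_REQUIRE_SIGNATURE / constants.CONFIG_ALLOWED_KEYS name.
class FakeUnit(object):
    filename = 'foo-1.0-1.noarch.rpm'
    signing_key = 'deadbeef12345678'  # hypothetical key ID (already lowercase)


class FakeConfig(object):
    def __init__(self, values):
        self._values = values

    def get(self, key, default=None):
        return self._values.get(key, default)


# Passes silently: a signature is required, the unit has one, and its key ID is in
# the allowed list (allowed keys are lowercased before comparison).
filter_signature(FakeUnit(), FakeConfig({'require_signature': True,
                                         'allowed_keys': ['DEADBEEF12345678']}))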
def _concatenate_units(self, existing_unit, new_unit):
    """
    Perform unit concatenation.

    :param existing_unit: The unit that is already in the DB
    :type  existing_unit: pulp.plugins.model.Unit
    :param new_unit: The unit we are combining with the existing unit
    :type  new_unit: pulp.server.db.model.ContentUnit
    """
    if existing_unit._content_type_id != new_unit._content_type_id:
        raise PulpCodedException(message="Cannot concatenate two units of different types. "
                                         "Tried to concatenate %s with %s" %
                                         (existing_unit.type_id, new_unit.type_id))

    if existing_unit.unit_key != new_unit.unit_key:
        raise PulpCodedException(message="Concatenated units must have the same unit key. "
                                         "Tried to concatenate %s with %s" %
                                         (existing_unit.unit_key, new_unit.unit_key))

    if isinstance(existing_unit, models.Errata):
        # Add in anything from new_unit that we don't already have. We key
        # package lists by name for this concatenation.
        existing_package_list_names = [p['name'] for p in existing_unit.pkglist]

        for possible_new_pkglist in new_unit.pkglist:
            if possible_new_pkglist['name'] not in existing_package_list_names:
                existing_unit.pkglist += [possible_new_pkglist]
    else:
        raise PulpCodedException(message="Concatenation of unit type %s is not supported" %
                                         existing_unit.type_id)

    # Return the unit now that we've possibly modified it.
    return existing_unit
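# A small standalone illustration (made-up data, not from Pulp) of the pkglist merge
# above: collections are keyed by their 'name', so a collection whose name already
# exists on the existing erratum is skipped, and a collection with a new name is
# appended.
existing_pkglist = [{'name': 'collection-1', 'packages': ['pkg-a']}]
new_pkglist = [
    {'name': 'collection-1', 'packages': ['pkg-a', 'pkg-b']},  # skipped: name already present
    {'name': 'collection-2', 'packages': ['pkg-c']},           # appended: new name
]
existing_names = [p['name'] for p in existing_pkglist]
for pkglist in new_pkglist:
    if pkglist['name'] not in existing_names:
        existing_pkglist += [pkglist]
assert [p['name'] for p in existing_pkglist] == ['collection-1', 'collection-2']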
def _load_unit_models(self):
    """
    Load all of the Unit Models from the ENTRY_POINT_UNIT_MODELS entry point.

    Attach the signals to the models here since the mongoengine signals will not be sent
    correctly if they are attached to the base class.

    :raises: PLP0038 if two models are defined with the same id
    :raises: PLP0039 if a model is not a subclass of ContentUnit
    """
    _logger.debug(_("Loading Unit Models"))
    for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT_UNIT_MODELS):
        msg = _('Loading unit model: %s' % str(entry_point))
        _logger.info(msg)
        model_id = entry_point.name
        model_class = entry_point.load()
        # model_class is itself a class, so its qualified name comes from its own
        # __module__ and __name__ (using __class__ here would name the metaclass instead).
        class_name = model_class.__module__ + "." + model_class.__name__
        if not issubclass(model_class, ContentUnit):
            raise PulpCodedException(error_code=error_codes.PLP0039,
                                     model_id=model_id,
                                     model_class=class_name)
        if model_id in self.unit_models:
            raise PulpCodedException(error_code=error_codes.PLP0038,
                                     model_id=model_id,
                                     model_class=class_name)
        self.unit_models[model_id] = model_class

        # Attach all the signals
        model_class.attach_signals()
    _logger.debug(_("Unit Model Loading Completed"))
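# Unit models are discovered through setuptools entry points. A hypothetical plugin
# would register its models in setup.py roughly as sketched below; the group string
# 'pulp.unit_models' and the module path are assumptions for illustration — the real
# group is whatever ENTRY_POINT_UNIT_MODELS names.
#
# setup(
#     name='pulp_rpm',
#     # ...
#     entry_points={
#         'pulp.unit_models': [
#             'rpm=pulp_rpm.plugins.db.models:RPM',
#         ],
#     },
# )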
def test_validates_child_errors_not_present(self):
    mock_method = Mock(side_effect=PulpCodedValidationException(
        validation_exceptions=[PulpCodedException(),
                               PulpCodedException(error_codes.PLP0012)]))
    self.assertRaises(AssertionError, util.assert_validation_exception, mock_method,
                      error_codes=[error_codes.PLP0001])
def create_consumer_group(group_id, display_name=None, description=None, consumer_ids=None,
                          notes=None):
    """
    Create a new consumer group.

    :param group_id: unique id of the consumer group
    :type  group_id: str
    :param display_name: display name of the consumer group
    :type  display_name: str or None
    :param description: description of the consumer group
    :type  description: str or None
    :param consumer_ids: list of ids for consumers initially belonging to the consumer group
    :type  consumer_ids: list or None
    :param notes: notes for the consumer group
    :type  notes: dict or None
    :return: SON representation of the consumer group
    :rtype:  bson.SON
    """
    validation_errors = []
    if group_id is None:
        validation_errors.append(PulpCodedException(error_codes.PLP1002, field='group_id'))
    elif _CONSUMER_GROUP_ID_REGEX.match(group_id) is None:
        validation_errors.append(PulpCodedException(error_codes.PLP1003, field='group_id'))

    if consumer_ids:
        # Validate that all the consumer_ids exist and raise an exception if they don't
        consumer_collection = Consumer.get_collection()
        matched_consumers = consumer_collection.find({'id': {'$in': consumer_ids}})
        # Compare counts with != (identity comparison via "is not" is unreliable for ints)
        if matched_consumers.count() != len(consumer_ids):
            # Create a set of all the matched consumer_ids
            matched_consumers_set = set()
            for consumer in matched_consumers:
                matched_consumers_set.add(consumer.get('id'))
            # Find the missing items
            for consumer_id in set(consumer_ids).difference(matched_consumers_set):
                validation_errors.append(PulpCodedException(error_codes.PLP1001,
                                                            consumer_id=consumer_id))

    if validation_errors:
        raise pulp_exceptions.PulpCodedValidationException(validation_errors)

    collection = ConsumerGroup.get_collection()
    consumer_group = ConsumerGroup(group_id, display_name, description, consumer_ids, notes)
    try:
        collection.insert(consumer_group)
    except DuplicateKeyError:
        raise pulp_exceptions.DuplicateResource(group_id), None, sys.exc_info()[2]
    group = collection.find_one({'id': group_id})
    return group
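# The missing-consumer check above boils down to a set difference. A standalone
# illustration with made-up IDs:
consumer_ids = ['c1', 'c2', 'c3']           # what the caller asked for
matched_consumers_set = set(['c1', 'c3'])   # what the DB query actually found
missing = set(consumer_ids).difference(matched_consumers_set)
assert missing == set(['c2'])  # each missing ID becomes a PLP1001 validation error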
def process_main(self):
    """
    This method is the main method executed when the step system executes a step.
    """
    (successful, output) = self.rsync()
    if not successful:
        raise PulpCodedException(message=output)
def _pull(self, path, remote_id, branch_id):
    """
    Pull the specified branch.

    :param path: The absolute path to the local repository.
    :type  path: str
    :param remote_id: The remote ID.
    :type  remote_id: str
    :param branch_id: The branch to pull.
    :type  branch_id: str
    :raises PulpCodedException:
    """
    def report_progress(report):
        data = dict(
            b=branch_id,
            f=report.fetched,
            r=report.requested,
            p=report.percent)
        self.progress_details = 'branch: %(b)s fetching %(f)d/%(r)d %(p)d%%' % data
        self.report_progress(force=True)

    try:
        repository = lib.Repository(path)
        repository.pull(remote_id, [branch_id], report_progress)
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0002, branch=branch_id, reason=str(le))
        raise pe
def cancel(task_id):
    """
    Cancel the task that is represented by the given task_id. This method cancels only the
    task with given task_id, not the spawned tasks. This also updates task's state to
    'canceled'.

    :param task_id: The ID of the task you wish to cancel
    :type  task_id: basestring

    :raises MissingResource: if a task with given task_id does not exist
    :raises PulpCodedException: if given task is already in a complete state
    """
    task_status = TaskStatusManager.find_by_task_id(task_id)
    if task_status is None:
        raise MissingResource(task_id)
    if task_status['state'] in constants.CALL_COMPLETE_STATES:
        raise PulpCodedException(PLP0023, task_id=task_id)
    controller.revoke(task_id, terminate=True)
    TaskStatus.get_collection().find_and_modify(
        {'task_id': task_id, 'state': {'$nin': constants.CALL_COMPLETE_STATES}},
        {'$set': {'state': constants.CALL_CANCELED_STATE}})
    msg = _('Task canceled: %(task_id)s.')
    msg = msg % {'task_id': task_id}
    logger.info(msg)
def _pull(self, path, remote_id, refs, depth):
    """
    Pull the specified refs.

    :param path: The absolute path to the local repository.
    :type  path: str
    :param remote_id: The remote ID.
    :type  remote_id: str
    :param refs: The refs to pull.
    :type  refs: list
    :param depth: The tree traversal depth.
    :type  depth: int
    :raises PulpCodedException:
    """
    def report_progress(report):
        data = dict(f=report.fetched, r=report.requested, p=report.percent)
        self.progress_details = 'fetching %(f)d/%(r)d %(p)d%%' % data
        self.report_progress(force=True)

    try:
        repository = lib.Repository(path)
        repository.pull(remote_id, refs, report_progress, depth)
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0002, reason=str(le))
        raise pe
def sanitize_checksum_type(checksum_type):
    """
    Sanitize and validate the checksum type.

    This function will always return the given checksum_type in lower case, unless it is
    "sha", in which case it will return "sha1". SHA and SHA-1 are the same algorithm, and so
    we prefer to use "sha1", since it is a more specific name. For some unit types (such as
    RPM), this can cause conflicts inside of Pulp when repos or uploads use a mix of sha and
    sha1. See https://bugzilla.redhat.com/show_bug.cgi?id=1165355

    This function also validates that the checksum_type is a recognized one from the list of
    known hashing algorithms.

    :param checksum_type: The checksum type we are sanitizing
    :type  checksum_type: basestring

    :return: A sanitized checksum type, converting "sha" to "sha1", otherwise returning the
             given checksum_type in lowercase.
    :rtype:  basestring

    :raises PulpCodedException: if the checksum type is not recognized
    """
    lowercase_checksum_type = checksum_type.lower()
    if lowercase_checksum_type == "sha":
        lowercase_checksum_type = "sha1"
    if lowercase_checksum_type not in HASHLIB_ALGORITHMS:
        raise PulpCodedException(error_code=error_codes.PLP1005, checksum_type=checksum_type)
    return lowercase_checksum_type
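# Expected behavior of sanitize_checksum_type(), per its docstring:
assert sanitize_checksum_type('SHA') == 'sha1'        # "sha" is normalized to "sha1"
assert sanitize_checksum_type('SHA256') == 'sha256'   # everything else is just lowercased
# sanitize_checksum_type('md6') raises PulpCodedException (PLP1005): "md6" is not a
# recognized hashlib algorithm.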
def __init__(self, repo=None, conduit=None, config=None):
    """
    This method initializes the SyncStep. It first validates the config to ensure that the
    required keys are present. It then constructs some needed items (such as a download
    config), and determines which registry API versions (v1 and/or v2) the feed URL
    supports. Child steps are added for each supported version; if neither API version is
    found, a PulpCodedException is raised.

    :param repo: repository to sync
    :type  repo: pulp.plugins.model.Repository
    :param conduit: sync conduit to use
    :type  conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: config object for the sync
    :type  config: pulp.plugins.config.PluginCallConfiguration
    """
    super(SyncStep, self).__init__(
        step_type=constants.SYNC_STEP_MAIN, repo=repo, conduit=conduit, config=config,
        plugin_type=constants.IMPORTER_TYPE_ID)
    self.description = _('Syncing Docker Repository')

    self._validate(config)
    download_config = nectar_config.importer_config_to_nectar_config(config.flatten())
    upstream_name = config.get(constants.CONFIG_KEY_UPSTREAM_NAME)
    url = config.get(importer_constants.KEY_FEED)

    # The DownloadMetadataSteps will set these to a list of Manifests and Blobs
    self.available_manifests = []
    self.available_blobs = []

    # Unit keys, populated by v1_sync.GetMetadataStep
    self.v1_available_units = []
    # populated by v1_sync.GetMetadataStep
    self.v1_tags = {}

    # Create a Repository object to interact with.
    self.index_repository = registry.V2Repository(
        upstream_name, download_config, url, self.get_working_dir())
    self.v1_index_repository = registry.V1Repository(
        upstream_name, download_config, url, self.get_working_dir())

    # determine which API versions are supported and add corresponding steps
    v2_enabled = config.get(constants.CONFIG_KEY_ENABLE_V2, default=True)
    v1_enabled = config.get(constants.CONFIG_KEY_ENABLE_V1, default=False)
    if not v2_enabled:
        _logger.debug(_('v2 API skipped due to config'))
    if not v1_enabled:
        _logger.debug(_('v1 API skipped due to config'))
    v2_found = v2_enabled and self.index_repository.api_version_check()
    v1_found = v1_enabled and self.v1_index_repository.api_version_check()

    if v2_found:
        _logger.debug(_('v2 API found'))
        self.add_v2_steps(repo, conduit, config)
    if v1_found:
        _logger.debug(_('v1 API found'))
        self.add_v1_steps(repo, config)
    if not any((v1_found, v2_found)):
        raise PulpCodedException(error_code=error_codes.DKR1008, registry=url)
def _get_predistributor(self):
    """
    Returns the distributor that is configured as predistributor.
    """
    predistributor = self.get_config().flatten().get("predistributor_id", None)
    if predistributor:
        return Distributor.objects.get_or_404(repo_id=self.repo.id,
                                              distributor_id=predistributor)
    else:
        raise PulpCodedException(error_code=error_codes.RPM1011)
def process_main(self):
    """
    Call out to the createrepo command line in order to process the files.
    """
    pipe = subprocess.Popen('createrepo_c -d --update --keep-all-metadata --skip-stat %s' %
                            self.content_dir,
                            shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = pipe.communicate()
    if pipe.returncode != 0:
        result_string = '%s\n::\n%s' % (stdout, stderr)
        raise PulpCodedException(message=result_string)
def process_main(self, item=None):
    """
    Add/update the remote summary information in the repository scratchpad.
    """
    try:
        lib_repository = lib.Repository(self.parent.storage_dir)
        remote = lib.Remote(self.parent.repo_id, lib_repository)
        refs = [r.dict() for r in remote.list_refs()]
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0005, reason=str(le))
        raise pe
def __init__(self, **kwargs):
    super(Main, self).__init__(
        step_type=constants.IMPORT_STEP_MAIN,
        plugin_type=constants.WEB_IMPORTER_TYPE_ID,
        **kwargs)
    if not self.feed_url:
        raise PulpCodedException(errors.OST0004)
    self.remote_id = model.generate_remote_id(self.feed_url)
    self.add_child(Create())
    self.add_child(Summary())
    self.add_child(Pull())
    self.add_child(Add())
    self.add_child(Clean())
def _get_package_checksum_tuple(self, package):
    """
    Decide which checksum to publish for the given package in the erratum package list.

    If updateinfo_checksum_type is requested explicitly, the checksum of this type will be
    published. If no checksum_type is requested, the checksum of the distributor checksum
    type will be published, if available. Otherwise the longest one will be chosen.

    Handle two possible ways of specifying the checksum in the erratum package list:
     - in the `sum` package field as a list of alternating checksum types and values,
       e.g. ['type1', 'checksum1', 'type2', 'checksum2']
     - in the `type` and `sums` package fields. This is only the case when the erratum was
       uploaded via pulp-admin. Only one type of checksum can be specified this way.

    :param package: package from the erratum package list
    :type  package: dict

    :return: checksum type and value to publish. An empty tuple is returned if there is no
             checksum available.
    :rtype:  tuple

    :raises PulpCodedException: if updateinfo_checksum_type is not available
    """
    package_checksum_tuple = ()
    dist_checksum_type = self.checksum_type
    package_checksums = package.get('sum') or []
    if package.get('type'):
        package_checksums += [package['type'], package.get('sums')]

    for checksum_type in (self.updateinfo_checksum_type, dist_checksum_type):
        try:
            checksum_index = package_checksums.index(checksum_type) + 1
        except (ValueError, IndexError):
            # raise exception if updateinfo_checksum_type is unavailable
            if self.updateinfo_checksum_type and \
                    checksum_type == self.updateinfo_checksum_type:
                raise PulpCodedException(error_codes.RPM1012,
                                         checksumtype=self.updateinfo_checksum_type)
            continue
        else:
            checksum_value = package_checksums[checksum_index]
            package_checksum_tuple = (checksum_type, checksum_value)
            break
    else:
        if package_checksums:
            # choose the longest (the best?) checksum available
            checksum_value = max(package_checksums[1::2], key=len)
            checksum_type_index = package_checksums.index(checksum_value) - 1
            checksum_type = package_checksums[checksum_type_index]
            package_checksum_tuple = (checksum_type, checksum_value)

    return package_checksum_tuple
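# The two package-list checksum encodings handled above, shown with illustrative
# (made-up) data. The second form is normalized into the first's alternating
# type/value shape before lookup:
package_sum_style = {'name': 'foo', 'sum': ['sha1', 'aaa111', 'sha256', 'bbb222']}
package_type_style = {'name': 'bar', 'type': 'sha256', 'sums': 'ccc333'}

checksums = package_type_style.get('sum') or []
if package_type_style.get('type'):
    checksums += [package_type_style['type'], package_type_style.get('sums')]
assert checksums == ['sha256', 'ccc333']  # same alternating shape as the 'sum' field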
def finalize(self):
    super(RepomdXMLFileContext, self).finalize()
    if self.gpg_sign:
        command = ('gpg --yes --detach-sign --armor %(metadata_file_path)s' %
                   {'metadata_file_path': self.metadata_file_path})
        pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, stderr = pipe.communicate()
        if pipe.returncode != 0:
            raise PulpCodedException(error_codes.RPM0001, command=command, stdout=stdout,
                                     stderr=stderr)
def process_main(self):
    """
    Call out to the createrepo command line in order to process the files.
    """
    checksum_type = self.parent.get_checksum_type()
    pipe = subprocess.Popen('createrepo_c -d --update --keep-all-metadata '
                            '-s %(checksum_type)s --skip-stat %(content_dir)s' %
                            {'checksum_type': checksum_type,
                             'content_dir': self.content_dir},
                            shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = pipe.communicate()
    if pipe.returncode != 0:
        result_string = '%s\n::\n%s' % (stdout, stderr)
        raise PulpCodedException(message=result_string)
def process_main(self, item=None):
    """
    Clean up after import:
     - Delete the remote used for the pull.
    """
    path = self.parent.storage_dir
    remote_id = self.parent.repo_id
    try:
        repository = lib.Repository(path)
        remote = lib.Remote(remote_id, repository)
        remote.delete()
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0003, id=remote_id, reason=str(le))
        raise pe
def check_metadata(self, url):
    """
    :param url: current URL we should sync
    :type  url: str

    :return: instance of MetadataFiles
    :rtype:  pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    """
    _logger.info(_('Downloading metadata from %(feed)s.') % {'feed': url})
    metadata_files = metadata.MetadataFiles(url, self.tmp_dir, self.nectar_config,
                                            self._url_modify)
    try:
        metadata_files.download_repomd()
    except IOError as e:
        raise PulpCodedException(error_code=error_codes.RPM1004, reason=str(e))

    return metadata_files
def process_main(self, item=None):
    """
    Call out to the repoview command line in order to process the files.
    """
    pipe = subprocess.Popen('repoview --title %(repo)s %(content_dir)s' %
                            {'content_dir': self.content_dir,
                             'repo': self.get_repo().id},
                            shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = pipe.communicate()
    if pipe.returncode != 0:
        raise PulpCodedException(error_codes.RPM0001, command='repoview', stdout=stdout,
                                 stderr=stderr)
def _do_publish(repo, distributor_id, distributor_instance, transfer_repo, conduit,
                call_config):
    distributor_coll = RepoDistributor.get_collection()
    publish_result_coll = RepoPublishResult.get_collection()
    repo_id = repo['id']

    # Perform the publish
    publish_start_timestamp = _now_timestamp()
    try:
        # Add the register_sigterm_handler decorator to the publish_repo call, so that we can
        # respond to signals by calling the Distributor's cancel_publish_repo() method.
        publish_repo = register_sigterm_handler(
            distributor_instance.publish_repo, distributor_instance.cancel_publish_repo)
        publish_report = publish_repo(transfer_repo, conduit, call_config)
        if publish_report is not None and hasattr(publish_report, 'success_flag') \
                and not publish_report.success_flag:
            raise PulpCodedException(error_code=error_codes.PLP0034, repository_id=repo_id,
                                     distributor_id=distributor_id)
    except Exception as e:
        publish_end_timestamp = _now_timestamp()

        # Reload the distributor in case the scratchpad is set by the plugin
        repo_distributor = distributor_coll.find_one({'repo_id': repo_id,
                                                      'id': distributor_id})
        repo_distributor['last_publish'] = publish_end_timestamp
        distributor_coll.save(repo_distributor, safe=True)

        # Add a publish history entry for the run
        result = RepoPublishResult.error_result(
            repo_id, repo_distributor['id'], repo_distributor['distributor_type_id'],
            publish_start_timestamp, publish_end_timestamp, e, sys.exc_info()[2])
        publish_result_coll.save(result, safe=True)

        _logger.exception(
            _('Exception caught from plugin during publish for repo [%(r)s]' % {'r': repo_id}))
        raise
def get_metadata(self):
    """
    :return: instance of MetadataFiles where each relevant file has been identified and
             downloaded.
    :rtype:  pulp_rpm.plugins.importers.yum.repomd.metadata.MetadataFiles
    """
    _logger.info(_('Downloading metadata from %(feed)s.') % {'feed': self.sync_feed})
    metadata_files = metadata.MetadataFiles(self.sync_feed, self.tmp_dir, self.nectar_config)
    # allow the downloader to be accessed by the cancel method if necessary
    self.downloader = metadata_files.downloader
    try:
        metadata_files.download_repomd()
    except IOError as e:
        raise PulpCodedException(error_code=error_codes.RPM1004, reason=str(e))

    return metadata_files
def parse_repomd(self):
    """
    Parse the downloaded repomd.xml file and populate the metadata dictionary.

    :raises PulpCodedException: if any of the necessary metadata files are not found
    """
    repomd_file_path = os.path.join(self.dst_dir, REPOMD_FILE_NAME)

    if not os.access(repomd_file_path, os.F_OK | os.R_OK):
        raise RuntimeError('%s has not been downloaded' % REPOMD_FILE_NAME)

    parser = iterparse(repomd_file_path, events=('start', 'end'))
    xml_iterator = iter(parser)

    # Get a hold of the root element so that we can clear it.
    # This prevents the entire parsed document from building up in memory.
    try:
        root_element = xml_iterator.next()[1]
    except SyntaxError:
        raise ValueError('could not parse repo metadata')

    for event, element in xml_iterator:
        if event != 'end':
            continue

        root_element.clear()

        if element.tag == REVISION_TAG:
            try:
                self.revision = int(element.text)
            except (TypeError, ValueError):
                _LOGGER.info('repository revision is not an integer. '
                             'unable to consider skipping steps.')
                self.revision = 0

        if element.tag == DATA_TAG:
            file_info = process_repomd_data_element(element)
            self.metadata[file_info['name']] = file_info

    for metadata_type in constants.MANDATORY_METADATA_TYPES:
        if metadata_type not in self.metadata:
            reason = '"%s" metadata is not found in repomd.xml' % metadata_type
            raise PulpCodedException(error_code=error_codes.RPM1015, reason=reason)
def process_main(self, item=None):
    """
    Add/update the remote summary information in the repository scratchpad.
    """
    try:
        lib_repository = lib.Repository(self.parent.storage_dir)
        remote = lib.Remote(self.parent.repo_id, lib_repository)
        refs = [r.dict() for r in remote.list_refs()]
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0005, reason=str(le))
        raise pe
    repository = self.get_repo().repo_obj
    map(self.clean_metadata, refs)
    repository.scratchpad.update({constants.REMOTE: {constants.SUMMARY: refs}})
    repository.save()
def process_main(self, item=None):
    """
    Repair corrupted local repository.

    The only option to repair a corrupted repository is to re-create it.
    Separate step because:
     - shutil.rmtree() is very slow and should be reflected in progress reporting.
     - anticipating better tools provided by libostree for doing the repair in the future.

    :raises PulpCodedException:
    """
    path = self.parent.storage_dir
    shutil.rmtree(path, ignore_errors=True)
    try:
        repository = lib.Repository(path)
        repository.create()
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0007, path=path, reason=str(le))
        raise pe
def process_main(self, item=None):
    """
    Ensure the local ostree repository has been created and configured.
    Also creates and configures a temporary remote used for the subsequent pulls.

    :raises PulpCodedException:
    """
    path = self.parent.storage_dir
    try:
        repository = lib.Repository(path)
        try:
            repository.open()
        except lib.LibError:
            repository.create()
        remote = Remote(self, repository)
        remote.add()
    except lib.LibError as le:
        pe = PulpCodedException(errors.OST0001, path=path, reason=str(le))
        raise pe
def __init__(self, metadata_file_path, checksum_type=None):
    """
    :param metadata_file_path: full path to metadata file to be generated
    :type  metadata_file_path: str
    :param checksum_type: checksum type to be used to generate and prepend checksum to the
                          file names of files. If checksum_type is None, no checksum is
                          added to the filename.
    :type  checksum_type: str or None
    """
    self.metadata_file_path = metadata_file_path
    self.metadata_file_handle = None
    self.checksum_type = checksum_type
    self.checksum = None
    if self.checksum_type is not None:
        checksum_function = CHECKSUM_FUNCTIONS.get(checksum_type)
        if not checksum_function:
            raise PulpCodedValidationException(
                [PulpCodedException(error_codes.PLP1005, checksum_type=checksum_type)])
        self.checksum_constructor = checksum_function
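# Sketch of the lookup above, assuming CHECKSUM_FUNCTIONS maps type names to hashlib
# constructors (the exact mapping is an assumption; the name below is a stand-in).
# The point is that an unknown type is rejected up front rather than failing later
# when the file checksum is computed:
import hashlib

CHECKSUM_FUNCTIONS_EXAMPLE = {'md5': hashlib.md5, 'sha1': hashlib.sha1,
                              'sha256': hashlib.sha256}
assert CHECKSUM_FUNCTIONS_EXAMPLE.get('sha256') is hashlib.sha256
assert CHECKSUM_FUNCTIONS_EXAMPLE.get('sha257') is None  # would trigger PLP1005 above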
def update_repo_and_plugins(repo_id, repo_delta, importer_config, distributor_configs):
    """
    Aggregate method that will update one or more of the following:
     * Repository metadata
     * Importer config
     * Zero or more distributors on the repository

    All of the above pieces do not need to be specified. If a piece is omitted, its
    configuration is not touched, nor is it removed from the repository. The same holds true
    for the distributor_configs dict: not every distributor must be represented.

    This call will attempt the updates in the order listed above. If an exception occurs
    during any of these steps, the updates stop and the exception is immediately raised. Any
    updates that have already taken place are not rolled back.

    This call will call out to RepoImporterManager.update_importer_config. Documentation for
    that method, especially possible exceptions, should be consulted for more information.

    Distributor updates will happen asynchronously as there could be a very large number of
    consumers to update and the repo update call is usually made synchronously.

    :param repo_id: unique identifier for the repo
    :type  repo_id: str
    :param repo_delta: list of attributes and their new values to change; if None, no
                       attempt to update the repo's metadata will be made
    :type  repo_delta: dict, None
    :param importer_config: new configuration to use for the repo's importer; if None, no
                            attempt will be made to update the importer
    :type  importer_config: dict, None
    :param distributor_configs: mapping of distributor ID to the new configuration to set
                                for it
    :type  distributor_configs: dict, None

    :return: updated repository object, same as returned from update_repo
    :rtype:  TaskResult
    """
    # Repo Update
    if repo_delta is None:
        repo_delta = {}
    repo = RepoManager.update_repo(repo_id, repo_delta)

    # Importer Update
    if importer_config is not None:
        importer_manager = manager_factory.repo_importer_manager()
        importer_manager.update_importer_config(repo_id, importer_config)

    errors = []
    additional_tasks = []

    # Distributor Update
    if distributor_configs is not None:
        for dist_id, dist_config in distributor_configs.items():
            update_result = repository.distributor_update(repo_id, dist_id, dist_config, None)
            additional_tasks.extend(update_result.spawned_tasks)
            # Only record real errors; a successful update has error set to None.
            if update_result.error:
                errors.append(update_result.error)

    error = None
    if len(errors) > 0:
        error = PulpCodedException(error_code=error_codes.PLP0006, repo_id=repo_id)
        error.child_exceptions = errors

    return TaskResult(repo, error, additional_tasks)
    options = {}
    manager = managers.consumer_bind_manager()
    additional_tasks = []
    errors = []
    for bind in manager.find_by_repo(repo_id):
        try:
            report = consumer.unbind(bind["consumer_id"], bind["repo_id"],
                                     bind["distributor_id"], options)
            if report:
                additional_tasks.extend(report.spawned_tasks)
        except Exception as e:
            errors.append(e)

    error = None
    if len(errors) > 0:
        error = PulpCodedException(PLP0007, repo_id=repo_id)
        error.child_exceptions = errors

    return TaskResult(error=error, spawned_tasks=additional_tasks)


@celery.task(base=Task)
def distributor_delete(repo_id, distributor_id):
    """
    Get the itinerary for deleting a repository distributor.
     1. Delete the distributor on the server.
     2. Unbind any bound consumers.

    :param repo_id: A repository ID.
    :type  repo_id: str
    :param distributor_id: A distributor id
    :type  distributor_id: str