def process_main(self, item=None):
    """
    Apply the tag named in the parent upload's metadata to a manifest.

    :param item: Not used by this step
    :type  item: None

    :raises PulpCodedValidationException: when the metadata is missing a tag
        name or digest, or when no manifest with the digest exists in the repo
    """
    metadata = self.parent.metadata
    tag_name = metadata.get('name')
    if tag_name is None:
        raise PulpCodedValidationException(error_code=error_codes.DKR1019,
                                           field='name')
    # https://pulp.plan.io/issues/3250 - use manifest_digest if available
    digest = metadata.get('manifest_digest', metadata.get('digest'))
    if digest is None:
        raise PulpCodedValidationException(error_code=error_codes.DKR1019,
                                           field='manifest_digest')
    pulp_user_metadata = metadata.get('pulp_user_metadata')
    repo_id = self.parent.repo.id

    # The digest may identify either an image manifest or a manifest list;
    # probe the repository for each unit type in turn until one matches.
    candidates = (
        (models.Manifest, constants.MANIFEST_IMAGE_TYPE),
        (models.ManifestList, constants.MANIFEST_LIST_TYPE),
    )
    manifests = None
    manifest_type = None
    found = False
    for model, type_name in candidates:
        unit_ids = repository.get_associated_unit_ids(
            repo_id, model._content_type_id.default)
        manifests = model.objects.filter(digest=digest, id__in=unit_ids)
        manifest_type = type_name
        if manifests.count() != 0:
            found = True
            break
    if not found:
        raise PulpCodedValidationException(
            error_code=error_codes.DKR1010, digest=digest, repo_id=repo_id)

    new_tag = models.Tag.objects.tag_manifest(
        repo_id=self.parent.repo.id, tag_name=tag_name, manifest_digest=digest,
        schema_version=manifests[0].schema_version, manifest_type=manifest_type,
        pulp_user_metadata=pulp_user_metadata)
    if new_tag:
        repository.associate_single_unit(self.parent.repo.repo_obj, new_tag)
    self.parent.uploaded_unit = new_tag
def process_main(self, item=None):
    """
    Apply the tag named in the parent upload's metadata to a manifest.

    :param item: Not used by this step
    :type  item: None

    :raises PulpCodedValidationException: when no manifest with the given
        digest exists in the repository
    """
    tag_name = self.parent.metadata['name']
    digest = self.parent.metadata['digest']
    repo_id = self.parent.repo.id

    # The digest may identify either an image manifest or a manifest list;
    # probe the repository for each unit type in turn until one matches.
    candidates = (
        (models.Manifest, constants.MANIFEST_IMAGE_TYPE),
        (models.ManifestList, constants.MANIFEST_LIST_TYPE),
    )
    manifests = None
    manifest_type = None
    found = False
    for model, type_name in candidates:
        unit_ids = repository.get_associated_unit_ids(
            repo_id, model._content_type_id.default)
        manifests = model.objects.filter(digest=digest, id__in=unit_ids)
        manifest_type = type_name
        if manifests.count() != 0:
            found = True
            break
    if not found:
        raise PulpCodedValidationException(
            error_code=error_codes.DKR1010, digest=digest, repo_id=repo_id)

    new_tag = models.Tag.objects.tag_manifest(
        repo_id=self.parent.repo.id, tag_name=tag_name, manifest_digest=digest,
        schema_version=manifests[0].schema_version, manifest_type=manifest_type)
    if new_tag:
        repository.associate_single_unit(self.parent.repo.repo_obj, new_tag)
def process_main(self, item=None):
    """
    Apply the tag named in the parent upload's metadata to an image manifest.

    :param item: Not used by this step
    :type  item: None

    :raises PulpCodedValidationException: when no manifest with the given
        digest exists in the repository
    """
    tag_name = self.parent.metadata['name']
    digest = self.parent.metadata['digest']
    repo_id = self.parent.repo.id

    # Restrict the digest lookup to manifests associated with this repo.
    manifest_type_id = models.Manifest._content_type_id.default
    unit_ids = repository.get_associated_unit_ids(repo_id, manifest_type_id)
    matches = models.Manifest.objects(digest=digest, id__in=unit_ids)
    if not matches.count():
        raise PulpCodedValidationException(error_code=error_codes.DKR1010,
                                           digest=digest, repo_id=repo_id)

    new_tag = models.Tag.objects.tag_manifest(repo_id=self.parent.repo.id,
                                              tag_name=tag_name,
                                              manifest_digest=digest)
    if new_tag:
        repository.associate_single_unit(self.parent.repo.repo_obj, new_tag)
def check_all_and_associate(wanted, conduit, config, download_deferred, catalog):
    """
    Reduce a set of wanted unit keys to those not already present in Pulp.

    Each wanted unit is looked up in the database; when a matching unit exists
    (and, for RPM-family types, its package file also exists on disk or
    downloading is deferred), it is associated with the repo being synced and
    removed from the wanted set. Only the supported unit types are checked
    against the filesystem.

    :param wanted: dict where keys are units as namedtuples, and values are
                   WantedUnitInfo instances
    :type  wanted: dict
    :param conduit: repo sync conduit
    :type  conduit: pulp.plugins.conduits.repo_sync.RepoSync
    :param config: configuration instance passed to the importer
    :type  config: pulp.plugins.config.PluginCallConfiguration
    :param download_deferred: indicates downloading is deferred (or not).
    :type  download_deferred: bool
    :param catalog: Deferred downloading catalog.
    :type  catalog: pulp_rpm.plugins.importers.yum.sync.PackageCatalog

    :return: set of unit keys as namedtuples, identifying which of the named
             tuples received as input were not found on the server.
    :rtype: set
    """
    rpm_family = (ids.TYPE_ID_RPM, ids.TYPE_ID_SRPM, ids.TYPE_ID_DRPM)

    # Ids of every unit already associated with the repo, gathered up front so
    # units found below are not re-associated.
    associated_ids = set()
    for type_id in rpm_family:
        associated_ids.update(
            repo_controller.get_associated_unit_ids(conduit.repo.repo_id, type_id))

    units_by_type = _sort_by_type(wanted.iterkeys())
    for type_id, unit_keys in units_by_type.iteritems():
        model = plugin_api.get_unit_model_by_id(type_id)
        # FIXME "fields" does not get used, but it should
        # fields = model.unit_key_fields + ('_storage_path',)
        existing = units_controller.find_units(
            model(**key._asdict()) for key in unit_keys.copy())
        for unit in existing:
            file_exists = (unit._storage_path is not None and
                           os.path.isfile(unit._storage_path))
            if type_id in rpm_family:
                # no matter what is the download policy, if existing unit has a
                # valid storage_path, we need to set the downloaded flag to True
                if file_exists and not unit.downloaded:
                    unit.downloaded = True
                    unit.save()
                # Existing RPMs, DRPMs and SRPMs are disqualified when the
                # associated package file does not exist and downloading is
                # not deferred.
                if not file_exists and not download_deferred:
                    continue
                catalog.add(unit, wanted[unit.unit_key_as_named_tuple].download_path)
            if unit.id not in associated_ids:
                if rpm_parse.signature_enabled(config):
                    try:
                        rpm_parse.filter_signature(unit, config)
                    except PulpCodedException as e:
                        _LOGGER.debug(e)
                        continue
                repo_controller.associate_single_unit(conduit.repo, unit)
            unit_keys.discard(unit.unit_key_as_named_tuple)

    # Whatever survived the per-type pruning above is still wanted.
    return set().union(*units_by_type.values())
def check_all_and_associate(wanted, conduit, config, download_deferred, catalog):
    """
    Reduce a set of wanted unit keys to those not already present in Pulp.

    Units found in the database are associated with the repo being synced and
    dropped from the wanted set, subject to an on-disk file check that is
    performed only for the supported (RPM-family) unit types.

    :param wanted: dict where keys are units as namedtuples, and values are
                   WantedUnitInfo instances
    :type  wanted: dict
    :param conduit: repo sync conduit
    :type  conduit: pulp.plugins.conduits.repo_sync.RepoSync
    :param config: configuration instance passed to the importer
    :type  config: pulp.plugins.config.PluginCallConfiguration
    :param download_deferred: indicates downloading is deferred (or not).
    :type  download_deferred: bool
    :param catalog: Deferred downloading catalog.
    :type  catalog: pulp_rpm.plugins.importers.yum.sync.PackageCatalog

    :return: set of unit keys as namedtuples, identifying which of the named
             tuples received as input were not found on the server.
    :rtype: set
    """
    package_types = (ids.TYPE_ID_RPM, ids.TYPE_ID_SRPM, ids.TYPE_ID_DRPM)

    # Gather the ids of units already tied to this repo so they are not
    # associated a second time below.
    already_associated = set()
    for package_type in package_types:
        ids_for_type = repo_controller.get_associated_unit_ids(
            conduit.repo.repo_id, package_type)
        already_associated.update(ids_for_type)

    by_type = _sort_by_type(wanted.iterkeys())
    for package_type, keys in by_type.iteritems():
        model = plugin_api.get_unit_model_by_id(package_type)
        # FIXME "fields" does not get used, but it should
        # fields = model.unit_key_fields + ('_storage_path',)
        candidates = (model(**key_tuple._asdict()) for key_tuple in keys.copy())
        for unit in units_controller.find_units(candidates):
            on_disk = (unit._storage_path is not None and
                       os.path.isfile(unit._storage_path))
            if package_type in package_types:
                # no matter what is the download policy, if existing unit has a
                # valid storage_path, we need to set the downloaded flag to True
                if on_disk and not unit.downloaded:
                    unit.downloaded = True
                    unit.save()
                # Existing RPMs, DRPMs and SRPMs are disqualified when the
                # associated package file does not exist and downloading is
                # not deferred.
                if not on_disk and not download_deferred:
                    continue
                catalog.add(unit, wanted[unit.unit_key_as_named_tuple].download_path)
            if unit.id not in already_associated:
                if rpm_parse.signature_enabled(config):
                    try:
                        rpm_parse.filter_signature(unit, config)
                    except PulpCodedException as e:
                        _LOGGER.debug(e)
                        continue
                repo_controller.associate_single_unit(conduit.repo, unit)
            keys.discard(unit.unit_key_as_named_tuple)

    # Anything not discarded above was never found and is still wanted.
    return set().union(*by_type.values())