Code example #1
def process_distribution(feed, tmp_dir, nectar_config, model, report):
    """
    Get the pulp_distribution.xml file from the server and if it exists download all the
    files it references to add them to the distribution unit.

    :param feed:            URL to the repository
    :type  feed:            str
    :param tmp_dir:         full path to the temporary directory being used
    :type  tmp_dir:         str
    :param nectar_config:   download config to be used by nectar
    :type  nectar_config:   nectar.config.DownloaderConfig
    :param model:
    :type model:
    :param report:
    :type report:
    :return: list of file dictionaries
    :rtype: list of dict
    """
    # Get the Distribution file
    result = get_distribution_file(feed, tmp_dir, nectar_config)
    files = []
    # If there is a Distribution file - parse it and add all files to the file_list
    if result:
        xsd = os.path.join(constants.USR_SHARE_DIR, 'pulp_distribution.xsd')
        schema_doc = ET.parse(xsd)
        xmlschema = ET.XMLSchema(schema_doc)
        try:
            tree = ET.parse(result)
            xmlschema.assertValid(tree)
        except Exception as e:
            raise PulpCodedValidationException(validation_exceptions=[
                PulpCodedValidationException(error_code=error_codes.RPM1001,
                                             feed=feed,
                                             validation_exceptions=[e])
            ])

        model.metadata[constants.CONFIG_KEY_DISTRIBUTION_XML_FILE] = \
            constants.DISTRIBUTION_XML
        # parse the distribution file and add all the files to the download request
        root = tree.getroot()
        for file_element in root.findall('file'):
            relative_path = file_element.text
            files.append({
                'relativepath': relative_path,
                'checksum': None,
                'checksumtype': None,
            })

        # Add the distribution file to the list of files
        files.append({
            'relativepath': constants.DISTRIBUTION_XML,
            'checksum': None,
            'checksumtype': None,
        })
Code example #2
    def process_main(self, item=None):
        """
        Update tags based on the parent metadata

        :param item: Not used by this step
        :type  item: None
        """

        md = self.parent.metadata
        tag = md.get('name')
        if tag is None:
            raise PulpCodedValidationException(error_code=error_codes.DKR1019,
                                               field='name')
        # https://pulp.plan.io/issues/3250 - use manifest_digest if available
        digest = md.get('manifest_digest', md.get('digest'))
        if digest is None:
            raise PulpCodedValidationException(error_code=error_codes.DKR1019,
                                               field='manifest_digest')
        pulp_user_metadata = md.get('pulp_user_metadata')
        repo_id = self.parent.repo.id
        manifest_type_id = models.Manifest._content_type_id.default
        repo_manifest_ids = repository.get_associated_unit_ids(
            repo_id, manifest_type_id)

        # Check whether there is a manifest with this digest in the queried repo.
        # Since we don't know if the provided digest belongs to an image manifest or a
        # manifest list, we need to try both.
        manifests = models.Manifest.objects.filter(digest=digest,
                                                   id__in=repo_manifest_ids)
        manifest_type = constants.MANIFEST_IMAGE_TYPE
        if manifests.count() == 0:
            manifest_list_type_id = models.ManifestList._content_type_id.default
            repo_manifest_list_ids = repository.get_associated_unit_ids(
                repo_id, manifest_list_type_id)
            manifests = models.ManifestList.objects.filter(
                digest=digest, id__in=repo_manifest_list_ids)
            manifest_type = constants.MANIFEST_LIST_TYPE
            if manifests.count() == 0:
                raise PulpCodedValidationException(
                    error_code=error_codes.DKR1010,
                    digest=digest,
                    repo_id=repo_id)

        new_tag = models.Tag.objects.tag_manifest(
            repo_id=self.parent.repo.id,
            tag_name=tag,
            manifest_digest=digest,
            schema_version=manifests[0].schema_version,
            manifest_type=manifest_type,
            pulp_user_metadata=pulp_user_metadata)

        if new_tag:
            repository.associate_single_unit(self.parent.repo.repo_obj,
                                             new_tag)
        self.parent.uploaded_unit = new_tag
Code example #3
    def validate_importer_config(repo_id, importer_type_id, importer_config):
        """
        Validate an importer configuration. This validates that the repository and importer type
        exist as these are both required to validate the configuration.

        :param repo_id:             identifies the repo
        :type  repo_id:             str
        :param importer_type_id:    identifies the type of importer being added;
                                    must correspond to an importer loaded at server startup
        :type  importer_type_id:    str
        :param importer_config:     configuration values for the importer; may be None
        :type  importer_config:     dict
        """
        repo_coll = Repo.get_collection()
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repo_id)

        if not plugin_api.is_valid_importer(importer_type_id):
            raise PulpCodedValidationException(
                error_code=error_codes.PLP1008,
                importer_type_id=importer_type_id)

        importer_instance, plugin_config = plugin_api.get_importer_by_id(
            importer_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if importer_config is not None:
            clean_config = dict([(k, v) for k, v in importer_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the importer plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.importer_working_dir(
            importer_type_id, repo_id)

        result = importer_instance.validate_config(transfer_repo, call_config)

        # For backward compatibility with plugins that don't yet return the tuple
        if isinstance(result, bool):
            valid_config = result
            message = None
        else:
            valid_config, message = result

        if not valid_config:
            raise PulpCodedValidationException(validation_errors=message)
Code example #4
    def process_distribution(self, tmp_dir):
        """
        Get the pulp_distribution.xml file from the server and if it exists download all the
        files it references to add them to the distribution unit.

        :param tmp_dir: The absolute path to the temporary directory
        :type tmp_dir: str
        :return: A list of file dictionaries
        :rtype: list
        """
        # Get the Distribution file
        result = self.get_distribution_file(tmp_dir)
        files = []
        # If there is a Distribution file - parse it and add all files to the file_list
        if result:
            xsd = os.path.join(constants.USR_SHARE_DIR,
                               'pulp_distribution.xsd')
            schema_doc = ET.parse(xsd)
            xmlschema = ET.XMLSchema(schema_doc)
            try:
                tree = ET.parse(result)
                xmlschema.assertValid(tree)
            except Exception as e:
                raise PulpCodedValidationException(validation_exceptions=[
                    PulpCodedValidationException(
                        error_code=error_codes.RPM1001,
                        feed=self.feed,
                        validation_exceptions=[e])
                ])

            # This is broken and best I can tell - not used.
            # model.metadata[constants.CONFIG_KEY_DISTRIBUTION_XML_FILE] = \
            #     constants.DISTRIBUTION_XML

            # parse the distribution file and add all the files to the download request
            root = tree.getroot()
            for file_element in root.findall('file'):
                relative_path = file_element.text
                files.append({
                    RELATIVE_PATH: relative_path,
                    CHECKSUM: None,
                    CHECKSUM_TYPE: None,
                })

            # Add the distribution file to the list of files
            files.append({
                RELATIVE_PATH: constants.DISTRIBUTION_XML,
                CHECKSUM: None,
                CHECKSUM_TYPE: None,
            })
Code example #5
File: treeinfo.py  Project: ulif/pulp_rpm
    def process_distribution(self, tmp_dir):
        """
        Get the pulp_distribution.xml file from the server and if it exists download all the
        files it references to add them to the distribution unit.

        :param tmp_dir: The absolute path to the temporary directory
        :type tmp_dir: str
        :return: A tuple that contains the list of file dictionaries and the absolute path
                 to the distribution file, or None if no PULP_DISTRIBUTION.xml was found.
        :rtype: (list, basestring)
        """
        # Get the Distribution file
        result = self.get_distribution_file(tmp_dir)
        files = []
        # If there is a Distribution file - parse it and add all files to the file_list
        if result:
            xsd = os.path.join(constants.USR_SHARE_DIR,
                               'pulp_distribution.xsd')
            schema_doc = ET.parse(xsd)
            xmlschema = ET.XMLSchema(schema_doc)
            try:
                tree = ET.parse(result)
                xmlschema.assertValid(tree)
            except Exception as e:
                raise PulpCodedValidationException(validation_exceptions=[
                    PulpCodedValidationException(
                        error_code=error_codes.RPM1001,
                        feed=self.feed,
                        validation_exceptions=[e])
                ])

            # parse the distribution file and add all the files to the download request
            root = tree.getroot()
            for file_element in root.findall('file'):
                relative_path = file_element.text
                if not relative_path.startswith('repodata/'):
                    files.append({
                        RELATIVE_PATH: relative_path,
                        CHECKSUM: None,
                        CHECKSUM_TYPE: None,
                    })

            # Add the distribution file to the list of files
            files.append({
                RELATIVE_PATH: constants.DISTRIBUTION_XML,
                CHECKSUM: None,
                CHECKSUM_TYPE: None,
            })
Code example #6
File: util.py  Project: eslynunez/pulp_rpm
def errata_format_to_datetime(datetime_str, msg):
    """
    Convert known errata date-time formats to datetime object.

    Expected formats are:
     - '%Y-%m-%d %H:%M:%S'
     - '%Y-%m-%d %H:%M:%S UTC'

    :param datetime_str: date and time in errata specific format
    :type  datetime_str: str

    :param msg: additional error message in case of exception
    :type  msg: str

    :return: parsed date and time
    :rtype: datetime.datetime
    :raises ValueError: if the date and time are in unknown format
    """
    strptime_pattern = '%Y-%m-%d %H:%M:%S'
    datetime_str = datetime_str.strip()
    if datetime_str.endswith(' UTC'):
        datetime_str = datetime_str[:-4]

    try:
        datetime_obj = datetime.datetime.strptime(datetime_str,
                                                  strptime_pattern)
    except ValueError:
        raise PulpCodedValidationException(error_code=error_codes.RPM1007,
                                           details=msg,
                                           expected_format=strptime_pattern)

    return datetime_obj
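
As a quick illustration, the helper above reduces to a single strptime call once the optional ' UTC' suffix is stripped (the timestamp below is made up):

import datetime

ts = '2018-03-01 14:30:00 UTC'
if ts.endswith(' UTC'):
    ts = ts[:-4]
parsed = datetime.datetime.strptime(ts, '%Y-%m-%d %H:%M:%S')
# parsed == datetime.datetime(2018, 3, 1, 14, 30)
# Any other shape, e.g. '03/01/2018 14:30', makes strptime raise ValueError, which
# errata_format_to_datetime converts into PulpCodedValidationException (RPM1007).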
Code example #7
 def test_validates_error_codes_not_present(self):
     mock_method = Mock(side_effect=PulpCodedValidationException(
         validation_exceptions=[PulpCodedException()]))
     self.assertRaises(
         AssertionError,
         util.assert_validation_exception,
         mock_method,
         error_codes=[error_codes.PLP0001, error_codes.PLP0012])
Code example #8
def validate_config(config):
    """
    Validate a configuration

    :param config: Pulp configuration for the distributor
    :type  config: pulp.plugins.config.PluginCallConfiguration
    :raises: PulpCodedValidationException if any validations failed
    :return: (True, None)
    :rtype: tuple
    """
    errors = []
    server_url = config.get(constants.CONFIG_KEY_REDIRECT_URL)
    if server_url:
        parsed = urlparse(server_url)
        if not parsed.scheme:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.OST1001,
                    field=constants.CONFIG_KEY_REDIRECT_URL,
                    url=server_url))
        if not parsed.netloc:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.OST1002,
                    field=constants.CONFIG_KEY_REDIRECT_URL,
                    url=server_url))
        if not parsed.path:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.OST1003,
                    field=constants.CONFIG_KEY_REDIRECT_URL,
                    url=server_url))
    protected = config.get(constants.CONFIG_KEY_PROTECTED)
    if protected:
        protected_parsed = config.get_boolean(constants.CONFIG_KEY_PROTECTED)
        if protected_parsed is None:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.OST1004,
                    field=constants.CONFIG_KEY_PROTECTED,
                    value=protected))

    if errors:
        raise PulpCodedValidationException(validation_exceptions=errors)

    return True, None
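
For reference, the three URL checks above map directly onto urlparse components (the URLs below are made up):

from urlparse import urlparse  # urllib.parse on Python 3

urlparse('https://cdn.example.com/content')  # scheme, netloc and path all present: no errors
urlparse('https://cdn.example.com')          # empty path   -> OST1003
urlparse('//cdn.example.com/content')        # empty scheme -> OST1001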
Code example #9
 def wrapper(*args, **kwargs):
     request = args[1]
     if allow_empty and not request.body:
         request.body_as_json = {}
         return func(*args, **kwargs)
     try:
         request_json = json.loads(request.body)
     except ValueError:
         raise PulpCodedValidationException(
             error_code=error_codes.PLP1009)
     else:
         if not (json_type is None
                 or isinstance(request_json, json_type)):
             raise PulpCodedValidationException(
                 error_code=error_codes.PLP1015,
                 data_type=json_type.__name__)
         request.body_as_json = _ensure_input_encoding(request_json)
     return func(*args, **kwargs)
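
Only the inner wrapper is shown above; a minimal sketch of the kind of decorator factory that would close over func, allow_empty and json_type (the factory name and signature are assumptions, not taken from the snippet) is:

import json
from functools import wraps


def parse_json_body(allow_empty=False, json_type=None):
    """Hypothetical factory producing a wrapper like the one in the snippet above."""
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            request = args[1]  # Django-style view methods receive (self, request, ...)
            # ...body as in the snippet: parse request.body, type-check it, raise
            # PulpCodedValidationException (PLP1009 / PLP1015) on failure, and stash
            # the result on request.body_as_json before calling func.
            return func(*args, **kwargs)
        return wrapper
    return decorate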
Code example #10
    def process_main(self, item=None):
        """
        Save blobs and manifest to repository

        :param item: A Docker manifest or blob unit
        :type  item: pulp_docker.plugins.models.Blob or pulp_docker.plugins.models.Manifest
        :return:     None
        """
        if isinstance(item, models.Blob):
            checksum_type, _, checksum = item.digest.rpartition(':')
            if not checksum_type:
                # Never assume. But oh well
                checksum_type = "sha256"
            blob_src_path = os.path.join(self.get_working_dir(),
                                         checksum + '.tar')
            try:
                fobj = open(blob_src_path)
            except IOError:
                raise PulpCodedValidationException(
                    error_code=error_codes.DKR1018,
                    layer=os.path.basename(blob_src_path))
            try:
                verification.verify_checksum(fobj, checksum_type, checksum)
            except verification.VerificationException:
                raise PulpCodedValidationException(
                    error_code=error_codes.DKR1017,
                    checksum_type=checksum_type,
                    checksum=checksum)
            fobj.close()

            blob_dest_path = os.path.join(self.get_working_dir(), item.digest)
            os.rename(blob_src_path, blob_dest_path)

        item.set_storage_path(item.digest)
        try:
            item.save_and_import_content(
                os.path.join(self.get_working_dir(), item.digest))
        except NotUniqueError:
            item = item.__class__.objects.get(**item.unit_key)
        repository.associate_single_unit(self.get_repo().repo_obj, item)
        if isinstance(item, models.Manifest):
            self.parent.uploaded_unit = item
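
The checksum split at the top of this step assumes digests of the form '<algorithm>:<hexdigest>' (the value below is made up):

digest = 'sha256:4bc453b53cb3d914b45f4b250294236adba2c0e09ff6f03793949e7e39fd4cc1'
checksum_type, _, checksum = digest.rpartition(':')
# checksum_type == 'sha256'
# checksum      == '4bc453b53cb3d914b45f4b250294236adba2c0e09ff6f03793949e7e39fd4cc1'
# A bare digest with no '<algorithm>:' prefix leaves checksum_type empty, which is why
# the step falls back to "sha256".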
Code example #11
    def wrapper(*args, **kwargs):
        request = args[1]
        if allow_empty and not request.body:
            request.body_as_json = {}
            return func(*args, **kwargs)

        try:
            request_json = json.loads(request.body)
        except ValueError:
            raise PulpCodedValidationException(error_code=error_codes.PLP1009)
        else:
            request.body_as_json = _ensure_input_encoding(request_json)
        return func(*args, **kwargs)
Code example #12
File: upload.py  Project: vrutkovs/pulp_docker
    def process_main(self, item=None):
        """
        Update tags based on the parent metadata

        :param item: Not used by this step
        :type  item: None
        """

        tag = self.parent.metadata['name']
        digest = self.parent.metadata['digest']
        repo_id = self.parent.repo.id
        manifest_type_id = models.Manifest._content_type_id.default
        repo_manifest_ids = repository.get_associated_unit_ids(
            repo_id, manifest_type_id)

        # Check whether there is a manifest with this digest in the queried repo.
        # Since we don't know if the provided digest belongs to an image manifest or a
        # manifest list, we need to try both.
        manifests = models.Manifest.objects.filter(digest=digest,
                                                   id__in=repo_manifest_ids)
        manifest_type = constants.MANIFEST_IMAGE_TYPE
        if manifests.count() == 0:
            manifest_list_type_id = models.ManifestList._content_type_id.default
            repo_manifest_list_ids = repository.get_associated_unit_ids(
                repo_id, manifest_list_type_id)
            manifests = models.ManifestList.objects.filter(
                digest=digest, id__in=repo_manifest_list_ids)
            manifest_type = constants.MANIFEST_LIST_TYPE
            if manifests.count() == 0:
                raise PulpCodedValidationException(
                    error_code=error_codes.DKR1010,
                    digest=digest,
                    repo_id=repo_id)

        new_tag = models.Tag.objects.tag_manifest(
            repo_id=self.parent.repo.id,
            tag_name=tag,
            manifest_digest=digest,
            schema_version=manifests[0].schema_version,
            manifest_type=manifest_type)

        if new_tag:
            repository.associate_single_unit(self.parent.repo.repo_obj,
                                             new_tag)
Code example #13
    def process_main(self):
        _logger.debug(self.description)
        for unit_key in self.parent.step_get_local_units.units_to_download:
            hash_key = get_key_hash(unit_key)
            file_name = self.parent.deb_data[hash_key]['file_name']
            storage_path = generate_internal_storage_path(file_name)
            dest_dir = os.path.join(self.working_dir, storage_path)
            # validate the size of the file downloaded
            file_size = int(self.parent.deb_data[hash_key]['file_size'])
            if file_size != os.stat(dest_dir).st_size:
                raise PulpCodedValidationException(
                    error_code=error_codes.DEB1001, file_name=file_name)

            unit = self.get_conduit().init_unit(constants.DEB_TYPE_ID,
                                                unit_key,
                                                {'file_name': file_name},
                                                storage_path)
            shutil.move(dest_dir, unit.storage_path)
            self.get_conduit().save_unit(unit)
Code example #14
    def process_main(self):
        """
        Validate the uploaded manifest list json, then import content unit into repository.
        """
        with open(self.parent.file_path, 'r') as uploaded_file:
            manifest_list = uploaded_file.read()
        models.ManifestList.check_json(manifest_list)
        digest = models.UnitMixin.calculate_digest(manifest_list)
        manifest_list_instance = models.ManifestList.from_json(
            manifest_list, digest)
        transfer_repo = self.get_repo()

        # Ensure that all referenced manifests are already in the repository.
        manifest_digests = set(manifest_list_instance.manifests)
        qs = models.Manifest.objects.filter(
            digest__in=sorted(manifest_digests)).only('id', 'digest')
        known_manifests = dict(
            (manifest['digest'], manifest['id']) for manifest in qs)
        unit_qs = pulp_models.RepositoryContentUnit.objects.filter(
            repo_id=transfer_repo.id,
            unit_id__in=known_manifests.values()).values_list('unit_id')
        unit_ids_in_repo = list(unit_qs)

        missing_manifest_digests = []
        for manifest_digest in manifest_digests:
            unit_id = known_manifests.get(manifest_digest)
            if not unit_id or unit_id not in unit_ids_in_repo:
                missing_manifest_digests.append(manifest_digest)
        if missing_manifest_digests:
            raise PulpCodedValidationException(
                error_code=error_codes.DKR1013,
                digests=missing_manifest_digests)

        manifest_list_instance.set_storage_path(digest)
        try:
            manifest_list_instance.save_and_import_content(
                self.parent.file_path)
        except NotUniqueError:
            manifest_list_instance = models.ManifestList.objects.get(
                **manifest_list_instance.unit_key)
        repository.associate_single_unit(transfer_repo.repo_obj,
                                         manifest_list_instance)
        self.parent.uploaded_unit = manifest_list_instance
Code example #15
    def check_json(manifest_list_json):
        """
        Check the structure of a manifest list json file.

        This function is a sanity check to make sure the JSON contains the
        correct structure. It does not validate with the database.

        :param manifest_list_json: A JSON document describing a ManifestList object as defined by
                                   the Docker v2, Schema 2 Manifest List documentation.
        :type  manifest_list_json: basestring

        :raises PulpCodedValidationException: DKR1011 if manifest_list_json is invalid JSON
        :raises PulpCodedValidationException: DKR1012 if Manifest List has an invalid mediaType
        :raises PulpCodedValidationException: DKR1014 if any of the listed Manifests contain invalid
                                              mediaType
        :raises PulpCodedValidationException: DKR1015 if Manifest List does not have all required
                                              fields.
        :raises PulpCodedValidationException: DKR1016 if any Image Manifest in the list does not
                                              have all required fields.
        """
        try:
            manifest_list = json.loads(manifest_list_json)
        except ValueError:
            raise PulpCodedValidationException(error_code=error_codes.DKR1011)

        for field in MANIFEST_LIST_REQUIRED_FIELDS:
            if field not in manifest_list:
                raise PulpCodedValidationException(error_code=error_codes.DKR1015,
                                                   field=field)

        if manifest_list['mediaType'] != constants.MEDIATYPE_MANIFEST_LIST:
            raise PulpCodedValidationException(error_code=error_codes.DKR1012,
                                               media_type=manifest_list['mediaType'])

        for image_manifest_dict in manifest_list['manifests']:
            for field in IMAGE_MANIFEST_REQUIRED_FIELDS:
                if field not in image_manifest_dict:
                    raise PulpCodedValidationException(error_code=error_codes.DKR1016,
                                                       field=field)
            for field in IMAGE_MANIFEST_REQUIRED_PLATFORM_SUBFIELDS:
                if field not in image_manifest_dict['platform']:
                    subfield = "platform.{field}".format(field=field)
                    raise PulpCodedValidationException(error_code=error_codes.DKR1016,
                                                       field=subfield)

            if image_manifest_dict['mediaType'] not in [constants.MEDIATYPE_MANIFEST_S2,
                                                        constants.MEDIATYPE_MANIFEST_S1,
                                                        constants.MEDIATYPE_SIGNED_MANIFEST_S1]:
                raise PulpCodedValidationException(error_code=error_codes.DKR1014,
                                                   digest=image_manifest_dict['digest'])
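
For orientation, a manifest list with the overall shape this check expects (following the Docker v2, Schema 2 manifest list format; the digest and size are made up, and the exact required-field constants are not shown in the snippet) looks like:

minimal_manifest_list = {
    "schemaVersion": 2,
    "mediaType": "application/vnd.docker.distribution.manifest.list.v2+json",
    "manifests": [{
        "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
        "size": 7143,
        "digest": "sha256:e692418e4cbaf90ca69d05a66403747baa33ee08806650b51fab815ad7fc331f",
        "platform": {"architecture": "amd64", "os": "linux"},
    }],
}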
Code example #16
    def __init__(self, metadata_file_path, checksum_type=None):
        """
        :param metadata_file_path: full path to metadata file to be generated
        :type  metadata_file_path: str
        :param checksum_type: checksum type to be used to generate and prepend checksum
                              to the file names of files. If checksum_type is None,
                              no checksum is added to the filename
        :type checksum_type: str or None
        """

        self.metadata_file_path = metadata_file_path
        self.metadata_file_handle = None
        self.checksum_type = checksum_type
        self.checksum = None
        if self.checksum_type is not None:
            checksum_function = CHECKSUM_FUNCTIONS.get(checksum_type)
            if not checksum_function:
                raise PulpCodedValidationException(
                    [PulpCodedException(error_codes.PLP1005, checksum_type=checksum_type)])
            self.checksum_constructor = checksum_function
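
The CHECKSUM_FUNCTIONS mapping that drives the PLP1005 check is just a name-to-hasher lookup; a minimal stand-in (the real mapping is defined elsewhere in Pulp and may differ) could be:

import hashlib

CHECKSUM_FUNCTIONS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha256': hashlib.sha256,
}

# 'sha256' resolves to a hasher constructor; an unknown type such as 'sha123'
# returns None and falls through to the PulpCodedValidationException above.
checksum_constructor = CHECKSUM_FUNCTIONS.get('sha256')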
Code example #17
def validate_postdistributor(repo, config):
    """
    Validates that the postdistributor_id is set and is valid for this repository.

    :param repo:   metadata describing the repository to which the configuration applies
    :type  repo:   pulp.plugins.model.Repository
    :param config: Pulp configuration for the distributor
    :type  config: pulp.plugins.config.PluginCallConfiguration

    :return: tuple comprised of a boolean indicating whether validation succeeded or failed and a
             list of errors (if any)
    :rtype: (bool, list of strings) or (bool, None)
    :raises: PulpCodedValidationException if postdistributor_id is not defined or 404 if the
             distributor_id is not associated with the repo

    """
    postdistributor = config.flatten().get("postdistributor_id", None)
    if postdistributor:
        Distributor.objects.get_or_404(repo_id=repo.id,
                                       distributor_id=postdistributor)
        return True, None
    else:
        raise PulpCodedValidationException(error_code=error_codes.DKR1009)
Code example #18
def validate_config(config, repo):
    """
    Validate a configuration

    :param config: Pulp configuration for the distributor
    :type  config: pulp.plugins.config.PluginCallConfiguration
    :param repo:   metadata describing the repository to which the
                   configuration applies
    :type  repo:   pulp.plugins.model.Repository
    :raises:       PulpCodedValidationException if any validations failed
    """
    errors = []
    server_url = config.get(constants.CONFIG_KEY_REDIRECT_URL)
    if server_url:
        parsed = urlparse(server_url)
        if not parsed.scheme:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.DKR1001,
                    field=constants.CONFIG_KEY_REDIRECT_URL,
                    url=server_url))
        if not parsed.netloc:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.DKR1002,
                    field=constants.CONFIG_KEY_REDIRECT_URL,
                    url=server_url))
        if not parsed.path:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.DKR1003,
                    field=constants.CONFIG_KEY_REDIRECT_URL,
                    url=server_url))
    protected = config.get(constants.CONFIG_KEY_PROTECTED)
    if protected:
        protected_parsed = config.get_boolean(constants.CONFIG_KEY_PROTECTED)
        if protected_parsed is None:
            errors.append(
                PulpCodedValidationException(
                    error_code=error_codes.DKR1004,
                    field=constants.CONFIG_KEY_PROTECTED,
                    value=protected))

    # Check that the repo_registry is valid
    repo_registry_id = config.get(constants.CONFIG_KEY_REPO_REGISTRY_ID)
    if repo_registry_id and not _is_valid_repo_registry_id(repo_registry_id):
        errors.append(
            PulpCodedValidationException(
                error_code=error_codes.DKR1005,
                field=constants.CONFIG_KEY_REPO_REGISTRY_ID,
                value=repo_registry_id))
    # If the repo_registry_id is not specified, this value defaults to the
    # repo id, so we need to validate that.
    elif not repo_registry_id and not _is_valid_repo_registry_id(repo.id):
        errors.append(
            PulpCodedValidationException(
                error_code=error_codes.DKR1006,
                field=constants.CONFIG_KEY_REPO_REGISTRY_ID,
                value=repo.id))

    if errors:
        raise PulpCodedValidationException(validation_exceptions=errors)

    return True, None
Code example #19
 def test_calls_method_with_kwargs(self):
     mock_method = Mock(side_effect=PulpCodedValidationException())
     util.assert_validation_exception(mock_method, [], baz="qux")
     mock_method.assert_called_once_with(baz="qux")
Code example #20
File: upload.py  Project: bowlofeggs/pulp_rpm
def _extract_rpm_data(type_id, rpm_filename):
    """
    Extract a dict of information for a given RPM or SRPM.

    :param type_id: The type of the unit that is being generated
    :type  type_id: str

    :param rpm_filename: full path to the package to analyze
    :type  rpm_filename: str

    :return: dict of data about the package
    :rtype:  dict
    """
    rpm_data = dict()

    # Read the RPM header attributes for use later
    ts = rpm.TransactionSet()
    ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
    fd = os.open(rpm_filename, os.O_RDONLY)
    try:
        headers = ts.hdrFromFdno(fd)
        os.close(fd)
    except rpm.error:
        # Raised if the headers cannot be read
        os.close(fd)
        raise

    for k in ['name', 'version', 'release', 'epoch']:
        rpm_data[k] = headers[k]

    if rpm_data['epoch'] is not None:
        rpm_data['epoch'] = str(rpm_data['epoch'])
    else:
        rpm_data['epoch'] = str(0)

    if headers['sourcepackage']:
        if RPMTAG_NOSOURCE in headers.keys():
            rpm_data['arch'] = 'nosrc'
        else:
            rpm_data['arch'] = 'src'
    else:
        rpm_data['arch'] = headers['arch']

    # construct filename from metadata (BZ #1101168)
    if headers[rpm.RPMTAG_SOURCEPACKAGE]:
        if type_id != models.SRPM._content_type_id.default:
            raise PulpCodedValidationException(error_code=error_codes.RPM1002)
        rpm_basefilename = "%s-%s-%s.src.rpm" % (headers['name'],
                                                 headers['version'],
                                                 headers['release'])
    else:
        if type_id != models.RPM._content_type_id.default:
            raise PulpCodedValidationException(error_code=error_codes.RPM1003)
        rpm_basefilename = "%s-%s-%s.%s.rpm" % (headers['name'],
                                                headers['version'],
                                                headers['release'],
                                                headers['arch'])

    rpm_data['relativepath'] = rpm_basefilename
    rpm_data['filename'] = rpm_basefilename

    # This format is, and has always been, incorrect. As of the new yum importer, the
    # plugin will generate these from the XML snippet because the API into RPM headers
    # is atrocious. This is the end game for this functionality anyway, moving all of
    # that metadata derivation into the plugin, so this is just a first step.
    # I'm leaving these in and commented to show how not to do it.
    # rpm_data['requires'] = [(r,) for r in headers['requires']]
    # rpm_data['provides'] = [(p,) for p in headers['provides']]

    rpm_data['buildhost'] = headers['buildhost']
    rpm_data['license'] = headers['license']
    rpm_data['vendor'] = headers['vendor']
    rpm_data['description'] = headers['description']
    rpm_data['build_time'] = headers[rpm.RPMTAG_BUILDTIME]
    # Use the mtime of the file to match what is in the generated xml from
    # rpm_parse.get_package_xml(..)
    file_stat = os.stat(rpm_filename)
    rpm_data['time'] = file_stat[stat.ST_MTIME]

    return rpm_data
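
The filename constructed above follows the usual name-version-release.arch.rpm pattern (the header values below are made up):

headers = {'name': 'bash', 'version': '4.2.46', 'release': '31.el7', 'arch': 'x86_64'}
rpm_basefilename = "%s-%s-%s.%s.rpm" % (headers['name'], headers['version'],
                                        headers['release'], headers['arch'])
# -> 'bash-4.2.46-31.el7.x86_64.rpm'
# Source packages instead get '<name>-<version>-<release>.src.rpm'.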
Code example #21
 def test_calls_method_with_child_exceptions(self):
     mock_method = Mock(side_effect=PulpCodedValidationException(
         validation_exceptions=[PulpCodedException()]))
     util.assert_validation_exception(mock_method,
                                      error_codes=[error_codes.PLP0001])
Code example #22
 def test_error_codes_no_child_exceptions(self):
     mock_method = Mock(side_effect=PulpCodedValidationException())
     self.assertRaises(AssertionError,
                       util.assert_validation_exception,
                       mock_method,
                       error_codes=[error_codes.PLP1002])
Code example #23
def _generate_rpm_data(type_id, rpm_filename, user_metadata=None):
    """
    For the given RPM, analyzes its metadata to generate the appropriate unit
    key and metadata fields, returning both to the caller.

    :param type_id: The type of the unit that is being generated
    :type  type_id: str
    :param rpm_filename: full path to the RPM to analyze
    :type  rpm_filename: str
    :param user_metadata: user supplied metadata about the unit. This is optional.
    :type  user_metadata: dict

    :return: tuple of unit key and unit metadata for the RPM
    :rtype:  tuple
    """

    # Expected metadata fields:
    # "vendor", "description", "buildhost", "license", "vendor", "requires", "provides",
    # "relativepath", "filename"
    #
    # Expected unit key fields:
    # "name", "epoch", "version", "release", "arch", "checksumtype", "checksum"

    unit_key = dict()
    metadata = dict()

    # Read the RPM header attributes for use later
    ts = rpm.TransactionSet()
    ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES)
    fd = os.open(rpm_filename, os.O_RDONLY)
    try:
        headers = ts.hdrFromFdno(fd)
        os.close(fd)
    except rpm.error:
        # Raised if the headers cannot be read
        os.close(fd)
        raise

    # -- Unit Key -----------------------
    # Checksum
    if user_metadata and user_metadata.get('checksum_type'):
        user_checksum_type = user_metadata.get('checksum_type')
        user_checksum_type = verification.sanitize_checksum_type(
            user_checksum_type)
        unit_key['checksumtype'] = user_checksum_type
    else:
        unit_key['checksumtype'] = verification.TYPE_SHA256
    unit_key['checksum'] = _calculate_checksum(unit_key['checksumtype'],
                                               rpm_filename)

    # Name, Version, Release, Epoch
    for k in ['name', 'version', 'release', 'epoch']:
        unit_key[k] = headers[k]

    # Epoch munging
    if unit_key['epoch'] is None:
        unit_key['epoch'] = str(0)
    else:
        unit_key['epoch'] = str(unit_key['epoch'])

    # Arch
    if headers['sourcepackage']:
        if RPMTAG_NOSOURCE in headers.keys():
            unit_key['arch'] = 'nosrc'
        else:
            unit_key['arch'] = 'src'
    else:
        unit_key['arch'] = headers['arch']

    # -- Unit Metadata ------------------

    # construct filename from metadata (BZ #1101168)
    if headers[rpm.RPMTAG_SOURCEPACKAGE]:
        if type_id != models.SRPM.TYPE:
            raise PulpCodedValidationException(error_code=error_codes.RPM1002)
        rpm_basefilename = "%s-%s-%s.src.rpm" % (
            headers['name'], headers['version'], headers['release'])
    else:
        if type_id != models.RPM.TYPE:
            raise PulpCodedValidationException(error_code=error_codes.RPM1003)
        rpm_basefilename = "%s-%s-%s.%s.rpm" % (
            headers['name'], headers['version'], headers['release'],
            headers['arch'])

    metadata['relativepath'] = rpm_basefilename
    metadata['filename'] = rpm_basefilename

    # This format is, and has always been, incorrect. As of the new yum importer, the
    # plugin will generate these from the XML snippet because the API into RPM headers
    # is atrocious. This is the end game for this functionality anyway, moving all of
    # that metadata derivation into the plugin, so this is just a first step.
    # I'm leaving these in and commented to show how not to do it.
    # metadata['requires'] = [(r,) for r in headers['requires']]
    # metadata['provides'] = [(p,) for p in headers['provides']]

    metadata['buildhost'] = headers['buildhost']
    metadata['license'] = headers['license']
    metadata['vendor'] = headers['vendor']
    metadata['description'] = headers['description']
    metadata['build_time'] = headers[rpm.RPMTAG_BUILDTIME]
    # Use the mtime of the file to match what is in the generated xml from
    # rpm_parse.get_package_xml(..)
    file_stat = os.stat(rpm_filename)
    metadata['time'] = file_stat[stat.ST_MTIME]

    return unit_key, metadata
Code example #24
 def test_child_exceptions_no_error_codes(self):
     mock_method = Mock(side_effect=PulpCodedValidationException(
         validation_exceptions=[PulpCodedException()]))
     self.assertRaises(AssertionError, util.assert_validation_exception,
                       mock_method, [])