Example #1
    def _verify_checksum(self, unit, report):
        """
        Verifies the checksum of the given unit if the sync is configured to do so.
        If the verification fails, the failure is recorded in the sync's progress
        report and a verification error is raised.

        :param unit: domain model instance of the package that was downloaded
        :type  unit: pulp_rpm.plugins.db.models.NonMetadataPackage
        :param report: report handed to this listener by the downloader
        :type  report: nectar.report.DownloadReport

        :raises verification.VerificationException: if the checksum of the content is incorrect
        """

        if not self.sync.config.get(importer_constants.KEY_VALIDATE):
            return

        with open(report.destination, 'rb') as fp:
            sums = util.calculate_checksums(fp, [util.TYPE_MD5, util.TYPE_SHA1, util.TYPE_SHA256])

        if sums[unit.checksumtype] != unit.checksum:
            error_report = {
                constants.NAME: unit.name,
                constants.ERROR_CODE: constants.ERROR_CHECKSUM_VERIFICATION,
                constants.CHECKSUM_TYPE: unit.checksumtype,
                constants.ERROR_KEY_CHECKSUM_EXPECTED: unit.checksum,
                constants.ERROR_KEY_CHECKSUM_ACTUAL: sums[unit.checksumtype]
            }
            self.sync.progress_report['content'].failure(unit, error_report)
            # I don't know why the argument is the calculated sum, but that's the pre-existing
            # behavior in pulp.server.util.verify_checksum
            raise verification.VerificationException(sums[unit.checksumtype])
        else:
            # The unit will be saved later in the workflow, after the file is moved into place.
            unit.checksums.update(sums)
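
A minimal sketch of how a method like this might be driven from a downloader callback; the listener class, the download_succeeded() hook, and report.data carrying the unit are illustrative assumptions, not part of the code above.

# Illustrative only: a nectar-style listener whose success callback runs the
# _verify_checksum() shown above. Assumes the same `verification` import as
# that example; the hook name and report.data are assumptions.
class _SketchPackageListener(object):

    def __init__(self, sync):
        self.sync = sync

    def download_succeeded(self, report):
        unit = report.data  # assumed: the downloader attached the unit here
        try:
            self._verify_checksum(unit, report)
        except verification.VerificationException:
            # The failure is already recorded in the progress report, so the
            # unit is simply not saved.
            return
        # On success the file would be moved into place and the unit saved.

    # _verify_checksum() from the example above would be defined here.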
Example #2
def migrate(*args, **kwargs):
    """
    Add separate fields for md5sum, sha1, sha256
    """
    warnings_encountered = False
    deb_collection = connection.get_collection('units_deb')
    deb_count = deb_collection.count()

    with migration_utils.MigrationProgressLog('Deb Package',
                                              deb_count) as progress_log:
        for deb_package in deb_collection.find({}).batch_size(100):
            storage_path = deb_package['_storage_path']
            package_id = deb_package['_id']
            if not os.path.exists(storage_path):
                warnings_encountered = True
                msg = 'deb package file corresponding to db_unit with _id = {}\n'\
                      'was not found at _storage_path = {}.\n'\
                      'The unit was not migrated!'.format(package_id, storage_path)
                _logger.warn(msg)
                continue

            with open(storage_path, 'rb') as file_handle:
                checksums = server_utils.calculate_checksums(
                    file_handle, CHECKSUM_TYPES)

            new_fields = {
                'md5sum': checksums[server_utils.TYPE_MD5],
                'sha1': checksums[server_utils.TYPE_SHA1],
                'sha256': checksums[server_utils.TYPE_SHA256],
            }

            if checksums[
                    deb_package['checksumtype']] != deb_package['checksum']:
                raise Exception(
                    'New checksum does not match existing checksum for\n'
                    '_id = {}\nfile = {}'.format(package_id, storage_path))

            deb_collection.update_one(
                {'_id': package_id},
                {'$set': new_fields},
            )
            progress_log.progress()

    if warnings_encountered:
        msg = 'Warnings were encountered during the db migration!\n'\
              'Check the logs for more information, and consider deleting broken units.'
        _logger.warn(msg)
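
Units the migration skipped never received the new fields, which makes them easy to find afterwards; a hedged follow-up sketch that reuses the deb_collection handle from above (whether to delete such units is left to the operator):

# Illustrative only: list deb units that were not migrated (their package file
# was missing on disk, so no 'sha256' field was ever set on the document).
for deb_package in deb_collection.find({'sha256': {'$exists': False}}):
    print(deb_package['_id'], deb_package['_storage_path'])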
Example #3
    def validate(self, unit, *args):
        """Check the unit.

        :param unit: the unit to check
        :type unit: dict
        :param args: unused
        :returns: None
        :raises: MISSING_ERROR if the file cannot be read, CHECKSUM_ERROR if the
            calculated checksum does not match the expected one
        """
        checksumtype, expected_checksum, _storage_path = self.get_unit_attributes(
            unit, 'checksumtype', 'checksum', '_storage_path')
        try:
            with open(_storage_path, 'rb') as fd:
                checksums = util.calculate_checksums(fd, [checksumtype])
        except (IOError, OSError):
            raise MISSING_ERROR
        if checksums[checksumtype] != expected_checksum:
            raise CHECKSUM_ERROR
Example #4
def verify_checksum(file_object, checksum_type, checksum_value):
    """
    Verifies that the checksum of the contents of the given file-like object matches the
    expectation, raising an exception on mismatch; nothing is returned.

    :param file_object: file-like object to verify
    :type  file_object: file-like object

    :param checksum_type: type of checksum to calculate; must be one of the TYPE_* constants in
                          this module
    :type  checksum_type: str

    :param checksum_value: expected checksum to verify against
    :type  checksum_value: str

    :raises ValueError: if the checksum_type isn't one of the TYPE_* constants
    :raises VerificationException: if the calculated checksum does not match checksum_value
    """
    calculated_sum = calculate_checksums(file_object, [checksum_type])[checksum_type]

    if calculated_sum != checksum_value:
        raise VerificationException(calculated_sum)
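
A short usage sketch for the helper above; the path and expected digest are placeholders, and TYPE_SHA256 stands in for one of the module's TYPE_* constants mentioned in the docstring.

# Hypothetical caller: verify a downloaded file against a known sha256 digest.
try:
    with open('/tmp/example.rpm', 'rb') as fp:
        verify_checksum(fp, TYPE_SHA256, 'expected-hex-digest')
except VerificationException as exc:
    # Per the pre-existing behavior noted in Example #1, the exception carries
    # the checksum that was actually calculated.
    print('checksum mismatch, calculated %s' % exc)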
Example #5
    def _verify_checksum(self, unit, location):
        """
        Verifies the checksum of the given unit if the sync is configured to do so.
        If the verification fails, the failure is recorded in the sync's progress
        report and a verification error is raised.

        :param unit: domain model instance of the package that was downloaded
        :type  unit: pulp_rpm.plugins.db.models.NonMetadataPackage
        :param location: location of the unit that needs to be verified
        :type  location: str

        :raises verification.VerificationException: if the checksum of the content is incorrect
        """

        if not self.sync.config.get(importer_constants.KEY_VALIDATE):
            return

        with open(location, 'rb') as fp:
            sums = util.calculate_checksums(
                fp, [util.TYPE_MD5, util.TYPE_SHA1, util.TYPE_SHA256])

        if sums[unit.checksumtype] != unit.checksum:
            error_report = {
                constants.NAME: unit.name,
                constants.ERROR_CODE: constants.ERROR_CHECKSUM_VERIFICATION,
                constants.CHECKSUM_TYPE: unit.checksumtype,
                constants.ERROR_KEY_CHECKSUM_EXPECTED: unit.checksum,
                constants.ERROR_KEY_CHECKSUM_ACTUAL: sums[unit.checksumtype]
            }
            self.sync.progress_report['content'].failure(unit, error_report)
            # I don't know why the argument is the calculated sum, but that's the pre-existing
            # behavior in pulp.server.util.verify_checksum
            raise verification.VerificationException(sums[unit.checksumtype])
        else:
            # The unit will be saved later in the workflow, after the file is moved into place.
            unit.checksums.update(sums)
Example #6
    @classmethod
    def _compute_checksum(cls, fobj):
        cstype = util.TYPE_SHA256
        return util.calculate_checksums(fobj, [cstype])[cstype]
Example #7
    def test_one(self):
        ret = util.calculate_checksums(self.f, ['sha256'])

        self.assertEqual(ret['sha256'], self.sha256_sum)
        self.assertEqual(len(ret), 1)
Example #8
    def test_with_data(self):
        ret = util.calculate_checksums(self.f, ['sha1', 'sha256'])

        self.assertEqual(ret['sha1'], self.sha1_sum)
        self.assertEqual(ret['sha256'], self.sha256_sum)
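
These tests only pin down the return shape of util.calculate_checksums: a dict keyed by checksum type, with one hex digest per requested type. For reference, a minimal sketch of how such a helper could be written with hashlib and chunked reads; an illustration under stated assumptions, not the Pulp implementation.

import hashlib


def calculate_checksums_sketch(file_object, checksum_types, chunk_size=8 * 1024 * 1024):
    """Return {checksum_type: hexdigest} for every requested type in one pass."""
    # Assumes the type names ('md5', 'sha1', 'sha256', ...) are valid hashlib names.
    hashers = dict((name, hashlib.new(name)) for name in checksum_types)
    file_object.seek(0)
    while True:
        chunk = file_object.read(chunk_size)
        if not chunk:
            break
        for hasher in hashers.values():
            hasher.update(chunk)
    return dict((name, hasher.hexdigest()) for name, hasher in hashers.items())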
Example #9
def _handle_package(repo, type_id, unit_key, metadata, file_path, conduit, config):
    """
    Handles the upload for an RPM, SRPM or DRPM.

    This inspects the package contents to determine field values. The unit_key
    and metadata fields overwrite field values determined through package inspection.

    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository

    :param type_id: The type_id of the package being uploaded
    :type  type_id: str

    :param unit_key: A dictionary of fields to overwrite introspected field values
    :type  unit_key: dict

    :param metadata: A dictionary of fields to overwrite introspected field values, or None
    :type  metadata: dict or None

    :param file_path: The path to the uploaded package
    :type  file_path: str

    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit

    :param config: plugin configuration for the repository
    :type  config: pulp.plugins.config.PluginCallConfiguration

    :raises PulpCodedException PLP1005: if the checksum type from the user is not recognized
    :raises PulpCodedException PLP1013: if the checksum value from the user does not validate
    """
    try:
        if type_id == models.DRPM._content_type_id.default:
            rpm_data = _extract_drpm_data(file_path)
        else:
            rpm_data = _extract_rpm_data(type_id, file_path)
    except Exception:
        _LOGGER.exception('Error extracting RPM metadata for [%s]' % file_path)
        raise

    # metadata can be None
    metadata = metadata or {}

    model_class = plugin_api.get_unit_model_by_id(type_id)
    update_fields_inbound(model_class, unit_key or {})
    update_fields_inbound(model_class, metadata or {})

    with open(file_path, 'rb') as fp:
        sums = util.calculate_checksums(fp, models.RpmBase.DEFAULT_CHECKSUM_TYPES)

    # validate checksum if possible
    if metadata.get('checksum'):
        checksumtype = metadata.pop('checksum_type', util.TYPE_SHA256)
        checksumtype = util.sanitize_checksum_type(checksumtype)
        if checksumtype not in sums:
            raise PulpCodedException(error_code=error_codes.RPM1009, checksumtype=checksumtype)
        if metadata['checksum'] != sums[checksumtype]:
            raise PulpCodedException(error_code=platform_errors.PLP1013)
        _LOGGER.debug(_('Upload checksum matches.'))

    # Save all uploaded RPMs with sha256 in the unit key, since we can now publish with other
    # types, regardless of what is in the unit key.
    rpm_data['checksumtype'] = util.TYPE_SHA256
    rpm_data['checksum'] = sums[util.TYPE_SHA256]
    # keep all available checksum values on the model
    rpm_data['checksums'] = sums

    # Update the RPM-extracted data with anything additional the user specified.
    # Allow the user-specified values to override the extracted ones.
    rpm_data.update(metadata or {})
    rpm_data.update(unit_key or {})

    # Validate the user specified data by instantiating the model
    try:
        unit = model_class(**rpm_data)
    except TypeError:
        raise ModelInstantiationError()

    if type_id != models.DRPM._content_type_id.default:
        # Extract/adjust the repodata snippets
        repodata = rpm_parse.get_package_xml(file_path, sumtype=unit.checksumtype)
        _update_provides_requires(unit, repodata)
        _update_files(unit, repodata)
        unit.modify_xml(repodata)

    # check if the unit has duplicate nevra
    purge.remove_unit_duplicate_nevra(unit, repo)

    unit.set_storage_path(os.path.basename(file_path))
    try:
        unit.save_and_import_content(file_path)
    except NotUniqueError:
        unit = unit.__class__.objects.filter(**unit.unit_key).first()

    if rpm_parse.signature_enabled(config):
        rpm_parse.filter_signature(unit, config)
    repo_controller.associate_single_unit(repo, unit)
Example #10
def _handle_package(repo, type_id, unit_key, metadata, file_path, conduit,
                    config):
    """
    Handles the upload for an RPM or SRPM.

    This inspects the package contents to determine field values. The unit_key
    and metadata fields overwrite field values determined through package inspection.

    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository

    :param type_id: The type_id of the package being uploaded
    :type  type_id: str

    :param unit_key: A dictionary of fields to overwrite introspected field values
    :type  unit_key: dict

    :param metadata: A dictionary of fields to overwrite introspected field values, or None
    :type  metadata: dict or None

    :param file_path: The path to the uploaded package
    :type  file_path: str

    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit

    :param config: plugin configuration for the repository
    :type  config: pulp.plugins.config.PluginCallConfiguration

    :raises PulpCodedException PLP1005: if the checksum type from the user is not recognized
    :raises PulpCodedException PLP1013: if the checksum value from the user does not validate
    """
    try:
        rpm_data = _extract_rpm_data(type_id, file_path)
    except Exception:
        _LOGGER.exception('Error extracting RPM metadata for [%s]' % file_path)
        raise

    # metadata can be None
    metadata = metadata or {}

    model_class = plugin_api.get_unit_model_by_id(type_id)
    update_fields_inbound(model_class, unit_key or {})
    update_fields_inbound(model_class, metadata or {})

    with open(file_path, 'rb') as fp:
        sums = util.calculate_checksums(fp,
                                        models.RpmBase.DEFAULT_CHECKSUM_TYPES)

    # validate checksum if possible
    if metadata.get('checksum'):
        checksumtype = metadata.pop('checksum_type', util.TYPE_SHA256)
        checksumtype = util.sanitize_checksum_type(checksumtype)
        if checksumtype not in sums:
            raise PulpCodedException(error_code=error_codes.RPM1009,
                                     checksumtype=checksumtype)
        if metadata['checksum'] != sums[checksumtype]:
            raise PulpCodedException(error_code=platform_errors.PLP1013)
        _LOGGER.debug(_('Upload checksum matches.'))

    # Save all uploaded RPMs with sha256 in the unit key, since we can now publish with other
    # types, regardless of what is in the unit key.
    rpm_data['checksumtype'] = util.TYPE_SHA256
    rpm_data['checksum'] = sums[util.TYPE_SHA256]
    # keep all available checksum values on the model
    rpm_data['checksums'] = sums

    # Update the RPM-extracted data with anything additional the user specified.
    # Allow the user-specified values to override the extracted ones.
    rpm_data.update(metadata or {})
    rpm_data.update(unit_key or {})

    # Validate the user specified data by instantiating the model
    try:
        unit = model_class(**rpm_data)
    except TypeError:
        raise ModelInstantiationError()

    # Extract/adjust the repodata snippets
    unit.repodata = rpm_parse.get_package_xml(file_path,
                                              sumtype=unit.checksumtype)
    _update_provides_requires(unit)
    unit.modify_xml()

    # check if the unit has duplicate nevra
    purge.remove_unit_duplicate_nevra(unit, repo)

    unit.set_storage_path(os.path.basename(file_path))
    try:
        unit.save_and_import_content(file_path)
    except NotUniqueError:
        unit = unit.__class__.objects.filter(**unit.unit_key).first()

    repo_controller.associate_single_unit(repo, unit)
Example #11
def _handle_package(repo, type_id, unit_key, metadata, file_path, conduit,
                    config):
    """
    Handles the upload for an RPM, SRPM or DRPM.

    This inspects the package contents to determine field values. The unit_key
    and metadata fields overwrite field values determined through package inspection.

    :param repo: The repository to import the package into
    :type  repo: pulp.server.db.model.Repository

    :param type_id: The type_id of the package being uploaded
    :type  type_id: str

    :param unit_key: A dictionary of fields to overwrite introspected field values
    :type  unit_key: dict

    :param metadata: A dictionary of fields to overwrite introspected field values, or None
    :type  metadata: dict or None

    :param file_path: The path to the uploaded package
    :type  file_path: str

    :param conduit: provides access to relevant Pulp functionality
    :type  conduit: pulp.plugins.conduits.upload.UploadConduit

    :param config: plugin configuration for the repository
    :type  config: pulp.plugins.config.PluginCallConfiguration

    :raises PulpCodedException PLP1005: if the checksum type from the user is not recognized
    :raises PulpCodedException PLP1013: if the checksum value from the user does not validate
    """
    try:
        if type_id == models.DRPM._content_type_id.default:
            unit = models.DRPM(**_extract_drpm_data(file_path))
        else:
            repodata = rpm_parse.get_package_xml(file_path,
                                                 sumtype=util.TYPE_SHA256)
            package_xml = (utils.fake_xml_element(
                repodata['primary'],
                constants.COMMON_NAMESPACE).find(primary.PACKAGE_TAG))
            unit = primary.process_package_element(package_xml)
    except Exception:
        raise PulpCodedException(error_codes.RPM1016)

    # metadata can be None
    metadata = metadata or {}

    model_class = plugin_api.get_unit_model_by_id(type_id)
    update_fields_inbound(model_class, unit_key or {})
    update_fields_inbound(model_class, metadata or {})

    with open(file_path, 'rb') as fp:
        sums = util.calculate_checksums(fp,
                                        models.RpmBase.DEFAULT_CHECKSUM_TYPES)

    # validate checksum if possible
    if metadata.get('checksum'):
        checksumtype = metadata.pop('checksum_type', util.TYPE_SHA256)
        checksumtype = util.sanitize_checksum_type(checksumtype)
        if checksumtype not in sums:
            raise PulpCodedException(error_code=error_codes.RPM1009,
                                     checksumtype=checksumtype)
        if metadata['checksum'] != sums[checksumtype]:
            raise PulpCodedException(error_code=platform_errors.PLP1013)
        _LOGGER.debug(_('Upload checksum matches.'))

    # Save all uploaded RPMs with sha256 in the unit key, since we can now publish with other
    # types, regardless of what is in the unit key.
    unit.checksumtype = util.TYPE_SHA256
    unit.checksum = sums[util.TYPE_SHA256]
    # keep all available checksum values on the model
    unit.checksums = sums

    # Update the RPM-extracted data with anything additional the user specified.
    # Allow the user-specified values to override the extracted ones.
    for key, value in metadata.items():
        setattr(unit, key, value)
    for key, value in (unit_key or {}).items():
        setattr(unit, key, value)

    if type_id != models.DRPM._content_type_id.default:
        # Extract/adjust the repodata snippets
        unit.signing_key = rpm_parse.package_signature(
            rpm_parse.package_headers(file_path))
        # construct filename from metadata (BZ #1101168)
        if type_id == models.SRPM._content_type_id.default:
            rpm_basefilename = "%s-%s-%s.src.rpm" % (unit.name, unit.version,
                                                     unit.release)
        else:
            rpm_basefilename = "%s-%s-%s.%s.rpm" % (unit.name, unit.version,
                                                    unit.release, unit.arch)
        unit.relativepath = rpm_basefilename
        unit.filename = rpm_basefilename
        _update_files(unit, repodata)
        unit.modify_xml(repodata)

    # check if the unit has duplicate nevra
    purge.remove_unit_duplicate_nevra(unit, repo)

    unit.set_storage_path(os.path.basename(file_path))
    try:
        unit.save_and_import_content(file_path)
    except TypeError:
        raise ModelInstantiationError()
    except NotUniqueError:
        unit = unit.__class__.objects.filter(**unit.unit_key).first()

    if rpm_parse.signature_enabled(config):
        rpm_parse.filter_signature(unit, config)
    repo_controller.associate_single_unit(repo, unit)