Example #1
def _validate_spec_urls(specfile_path):
    """
    Validate a specfile's Source URLs.

    Args:
        specfile_path (str): The path to the specfile to parse and validate.

    Raises:
        exceptions.SpecUrlException: If the specfile contains Source URLs that
            are invalid.
    """
    # The output of spectool -l <spec> is in the format:
    # Source0: some-string-we-want-to-be-a-url.tar.gz
    # Source1: some-string-we-want-to-be-a-url.tar.gz
    # ...
    # Patch0: patch-we-expect-to-be-in-dist-git.patch
    # ...
    output = sp.check_output(['spectool', '-l', specfile_path])
    for line in output.decode('utf-8').splitlines():
        if line.startswith('Source'):
            # Parse to make sure it's a url
            url = line.split(':', 1)[1].strip()
            parsed_url = urlparse(url)
            if not parsed_url.scheme or not parsed_url.netloc:
                msg = (
                    "One or more of the specfile's Sources is not a valid URL "
                    "so we cannot automatically build the new version for you."
                    "Please use a URL in your Source declarations if possible."
                )
                raise exceptions.SpecUrlException(msg)
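
For reference, a tiny standalone sketch of the scheme/netloc check used above. The sample values are invented; only urllib.parse from the standard library is assumed.

from urllib.parse import urlparse

# A complete URL parses with both a scheme and a network location.
good = urlparse('https://example.com/releases/pkg-1.0.tar.gz')
print(good.scheme, good.netloc)  # https example.com

# A bare file name yields neither, so the validation above rejects it.
bad = urlparse('pkg-1.0.tar.gz')
print(bool(bad.scheme), bool(bad.netloc))  # False False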
Example #2
def compare_sources(old_sources, new_sources):
    """
    Compare two sets of files via checksum and raise an exception if both sets
    contain the same file.

    Args:
        old_sources (list): A list of filesystem paths to source tarballs.
        new_sources (list): A list of filesystem paths to source tarballs.

    Raises:
        exceptions.SpecUrlException: If the old and new sources share a file
    """
    old_checksums = set()
    new_checksums = set()
    for sources, checksums in (
        (old_sources, old_checksums),
        (new_sources, new_checksums),
    ):
        for file_path in sources:
            with open(file_path, "rb") as fd:
                h = hashlib.sha256()
                h.update(fd.read())
                checksums.add(h.hexdigest())

    if old_checksums.intersection(new_checksums):
        msg = (
            "One or more of the new sources for this package are identical to "
            "the old sources. This is most likely caused either by identical source files "
            "between releases, for example service files, or the specfile does not use "
            "version macro in its source URLs. If this is the second case, then please "
            "update the specfile to use version macro in its source URLs.\n")
        raise exceptions.SpecUrlException(msg)

    return old_checksums, new_checksums
Example #3
def compare_sources(old_sources, new_sources):
    """
    Compare two sets of files via checksum and raise an exception if both sets
    contain the same file.

    Args:
        old_sources (list): A list of filesystem paths to source tarballs.
        new_sources (list): A list of filesystem paths to source tarballs.

    Raises:
        exceptions.SpecUrlException: If the old and new sources share a file
    """
    old_checksums = set()
    new_checksums = set()
    for sources, checksums in ((old_sources, old_checksums), (new_sources,
                                                              new_checksums)):
        for file_path in sources:
            with open(file_path, 'rb') as fd:
                h = hashlib.sha256()
                h.update(fd.read())
                checksums.add(h.hexdigest())

    if old_checksums.intersection(new_checksums):
        msg = (
            "One or more of the new sources for this package are identical to "
            "the old sources. It's likely this package does not use the version "
            "macro in its Source URLs. If possible, please update the specfile "
            "to include the version macro in the Source URLs")
        raise exceptions.SpecUrlException(msg)

    return old_checksums, new_checksums
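
A minimal usage sketch for either compare_sources variant above. It assumes hashlib is imported (as the function requires) and that exceptions is the project's own module providing SpecUrlException; the file paths are invented.

import hashlib

# Two "sources" with identical contents, e.g. an unchanged service file.
with open('/tmp/old-source.tar.gz', 'wb') as fd:
    fd.write(b'identical payload')
with open('/tmp/new-source.tar.gz', 'wb') as fd:
    fd.write(b'identical payload')

try:
    compare_sources(['/tmp/old-source.tar.gz'], ['/tmp/new-source.tar.gz'])
except exceptions.SpecUrlException as err:
    # Both sets contain the same SHA-256 checksum, so the check fails.
    print(err)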
Example #4
def _validate_spec_urls(specfile_path):
    """
    Validate a specfile's Source URLs.

    Args:
        specfile_path (str): The path to the specfile to parse and validate.

    Raises:
        exceptions.SpecUrlException: If the specfile contains Source URLs that
            are invalid.
    """
    # The output of spectool -l <spec> is in the format:
    # Source0: some-string-we-want-to-be-a-url.tar.gz
    # Source1: some-string-we-want-to-be-a-url.tar.gz
    # ...
    # Patch0: patch-we-expect-to-be-in-dist-git.patch
    # ...
    output = sp.check_output(["spectool", "-l", specfile_path])
    bad_urls = []
    for line in output.decode("utf-8").splitlines():
        if line.startswith("Source"):
            # Parse to make sure it's a url
            url = line.split(":", 1)[1].strip()
            parsed_url = urlparse(url)
            if not parsed_url.scheme or not parsed_url.netloc:
                bad_urls.append(url)
    if bad_urls:
        msg = (
            "The following Sources of the specfile are not valid URLs "
            "so we cannot automatically build the new version for you.  "
            "Please use URLs in your Source declarations if possible.\n\n"
            "- " + "\n- ".join(bad_urls)
        )
        raise exceptions.SpecUrlException(msg)
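
A short usage sketch for the validator, assuming spectool is installed, subprocess is imported as sp, urlparse comes from urllib.parse, and exceptions is the project's own module; the specfile path is invented.

try:
    _validate_spec_urls('/path/to/package.spec')
except exceptions.SpecUrlException as err:
    # With the variant above, err lists every offending Source entry.
    print(err)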
Example #5
def spec_sources(specfile_path, target_dir):
    """
    Retrieve a specfile's sources and store them in the given target directory.

    Example:
        >>> spec_sources('/path/to/specfile', '/tmp/dir')
        ['/tmp/dir/source0.tar.gz', '/tmp/dir/source1.tar.gz']

    Args:
        specfile_path (str): The filesystem path to the specfile
        target_dir (str): The directory where the file(s) will be saved.

    Returns:
        list: A list of absolute paths to the downloaded source files

    Raises:
        exceptions.SpecUrlException: If the specfile contains Source URLs that
            are invalid.
        exceptions.DownloadException: If a networking-related error occurs while
            downloading the specfile sources. This includes hostname resolution,
            non-200 HTTP status codes, SSL errors, etc.
    """
    _validate_spec_urls(specfile_path)
    files = []
    try:
        output = sp.check_output(['spectool', '-g', specfile_path],
                                 cwd=target_dir)
        for line in output.decode('utf-8').splitlines():
            if line.startswith('Getting'):
                files.append(
                    os.path.realpath(os.path.join(target_dir,
                                                  line.split()[-1])))
    except sp.CalledProcessError as e:
        # spectool passes the cURL exit codes back so see its manpage for the full list
        if e.returncode == 1:
            # Unknown protocol (e.g. not ftp, http, or https)
            msg = (
                'The specfile contains a Source URL with an unknown protocol; it should '
                'be "https", "http", or "ftp".')
            raise exceptions.SpecUrlException(msg)
        elif e.returncode in (5, 6):
            msg = "Unable to resolve the hostname for one of the package's Source URLs"
        elif e.returncode == 7:
            # Failed to connect to the host
            msg = "Unable to connect to the host for one of the package's Source URLs"
        elif e.returncode == 22:
            # cURL uses 22 for 400+ HTTP errors; the final line contains the specific code
            msg = (
                "An HTTP error occurred downloading the package's new Source URLs: "
                + e.output.decode('utf-8').splitlines()[-1])
        elif e.returncode == 60:
            msg = (
                "Unable to validate the TLS certificate for one of the package's"
                "Source URLs")
        else:
            msg = (
                u'An unexpected error occurred while downloading the new package sources; '
                u'please report this as a bug on the-new-hotness issue tracker.'
            )
            _log.error('{cmd} failed (exit {code}): {msg}'.format(
                cmd=e.cmd, code=e.returncode, msg=e.output))
        raise exceptions.DownloadException(msg)

    return files
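
Finally, a usage sketch that ties the pieces together. It assumes spectool is installed, os and subprocess (as sp) are imported, _validate_spec_urls is defined as above, and exceptions is the project's own module; the specfile path is invented.

import tempfile

target_dir = tempfile.mkdtemp()
try:
    sources = spec_sources('/path/to/package.spec', target_dir)
    print('Downloaded:', sources)
except exceptions.SpecUrlException as err:
    print('Invalid Source URLs in the specfile:', err)
except exceptions.DownloadException as err:
    print('Could not download the sources:', err)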