Beispiel #1
0
def build_unzip(zip_path):
    """Return the unzip extraction command and the archive's top-level folder.

    Runs ``unzip -q -l`` on the archive and parses the listing to find the
    root directory containing the zip file's contents.  The listing format is:

        (optional) hash
        Length      Date    Time    Name
        ---------  ---------- -----   ----
                0  01-01-2000 00:00   prefix-dir/sub_dir1/subdir2

    and the 'prefix-dir' component of the first entry is the prefix.
    """
    listing = subprocess.check_output(["unzip", "-q", "-l", zip_path], universal_newlines=True)
    rows = listing.splitlines() if listing else []
    if len(rows) <= 2:
        print_fatal("Zip file doesn't appear to have any content")
        exit(1)
    # An optional hash line may precede the header; the header separator row
    # starts with '-', so skip one extra row when it shows up that early.
    entry_row = 3 if len(rows) > 3 and rows[2][0] == '-' else 2
    prefix = rows[entry_row].split("/")[0].split()[-1]
    if not prefix:
        print_fatal("Malformed zipfile, unable to determine zipfile prefix")
        exit(1)

    extract_cmd = "unzip -qq -d {0} {1}".format(build.base_path, zip_path)
    return extract_cmd, prefix
Beispiel #2
0
def scan_for_licenses(srcdir, config, pkg_name):
    """Scan the project directory for things we can use to guess a description and summary."""
    known_names = {
        "copyright", "copyright.txt", "apache-2.0", "artistic.txt",
        "libcurllicense", "gpl.txt", "gpl2.txt", "gplv2.txt", "notice",
        "copyrights", "about_bsd.txt",
    }
    # Candidates start with copying or licen[cs]e (but are not likely
    # scripts) or end with licen[cs]e.
    name_re = re.compile(
        r"^((copying)|(licen[cs]e)|(e[dp]l-v\d+))|(licen[cs]e)(\.(txt|xml))?$")
    for dirpath, _dirnames, filenames in os.walk(srcdir):
        for fname in filenames:
            lowered = fname.lower()
            if lowered in known_names or name_re.search(lowered):
                license_from_copying_hash(os.path.join(dirpath, fname), srcdir,
                                          config, pkg_name)
            # KDE convention: plain .txt files living in a LICENSES directory.
            if os.path.basename(dirpath) == "LICENSES" and fname.endswith(".txt"):
                license_from_copying_hash(os.path.join(dirpath, fname), srcdir,
                                          config, pkg_name)

    if not licenses:
        print_fatal(
            " Cannot find any license or a valid {}.license file!\n".format(
                pkg_name))
        sys.exit(1)

    print("Licenses    : ", " ".join(sorted(licenses)))
Beispiel #3
0
def build_untar(tarball_path):
    """Determine extract command and tarball prefix from tar -tf output."""
    if not tarfile.is_tarfile(tarball_path):
        print_fatal("Not a valid tar file.")
        exit(1)

    prefix = None
    with tarfile.open(tarball_path, 'r') as archive:
        members = archive.getnames()
        if len(members) == 0:
            # An empty archive is unusable.
            print_fatal("Tar file doesn't appear to have any content")
            exit(1)
        if len(members) > 1:
            prefix = os.path.commonpath(members)

    if prefix:
        extract_cmd = "tar --directory={0} -xf {1}".format(
            build.base_path, tarball_path)
    else:
        # No common prefix: extract into a dir named after the tar filename.
        subdir = os.path.splitext(os.path.basename(tarball_path))[0]
        extract_cmd = "tar --directory={0} --one-top-level={1} -xf {2}".format(
            build.base_path, subdir, tarball_path)
    return extract_cmd, prefix
Beispiel #4
0
def examine_abi_host(download_path, results_dir):
    """Make use of the hostside abireport tool."""
    scan_cmd = 'abireport scan-packages "{}"'.format(results_dir)
    try:
        util.call(scan_cmd, cwd=download_path)
    except Exception as e:
        # Best effort: report the failure rather than crash the build.
        util.print_fatal("Error invoking abireport: {}".format(e))
Beispiel #5
0
    def save_system_pgo(self, mock_dir, content_name, config):
        """Copy chroot profiles to system pgo.

        Archives /var/tmp/pgo into ``<config.download_path>/pgo.tar.gz``
        (replacing any stale archive) and ensures .gitignore has a
        ``!pgo.tar.gz`` negation rule so the archive is kept in git.
        Exits fatally when the tar pipeline fails.
        """
        root_dir_src = "/"
        system_pgo_dir_src = "/var/tmp/pgo"
        system_pgo_dir_dst = f"{config.download_path}/pgo.tar.gz"
        system_gitignore = f"{config.download_path}/.gitignore"
        tar_cmd = f"tar --directory={root_dir_src} --create --file=- var/tmp/pgo/ | pigz -9 -p 20 > {system_pgo_dir_dst}"
        if os.path.isdir(system_pgo_dir_src):
            if any(os.scandir(system_pgo_dir_src)):
                # Replace any stale archive before writing a new one.
                if os.path.isfile(system_pgo_dir_dst):
                    os.remove(system_pgo_dir_dst)
                try:
                    # shell=True is required for the pipe and redirection.
                    # The unused `process` binding and the redundant
                    # universal_newlines (alias of text=True) were dropped.
                    subprocess.run(
                        tar_cmd,
                        check=True,
                        shell=True,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.STDOUT,
                        text=True,
                    )
                except subprocess.CalledProcessError as err:
                    print_fatal(
                        f"Unable to archive {system_pgo_dir_src} in {system_pgo_dir_dst} from {tar_cmd}: {err}"
                    )
                    sys.exit(1)

                # Append the negation rule only if it is not already present.
                append_new_gitrule = True
                with util.open_auto(system_gitignore, "r+") as gitignore:
                    for line in gitignore:
                        if "!pgo.tar.gz" in line:
                            append_new_gitrule = False
                            break
                    if append_new_gitrule:
                        gitignore.write("!pgo.tar.gz\n")
Beispiel #6
0
def git_ls_remote_custom_re(remote_url_cmd, clone_path, path, conf):
    """Return the newest version tag reported by `remote_url_cmd`.

    Runs the git ls-remote command in `clone_path`, extracts tag names
    (the text after 'refs/tags/'), reduces each tag with the user-supplied
    regex `conf.custom_git_re2` (groups 1-4 are joined with dots, group 5
    is appended verbatim), and returns the naturally-sorted newest version
    string, or "" when nothing matches or the custom regex is invalid.
    """
    process = subprocess.run(
        remote_url_cmd,
        check=False,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        cwd=clone_path,
    )
    output = process.stdout
    if not output:
        return ""

    tag_names = re.findall(r"(?<=refs\/tags\/).*", output, re.MULTILINE)
    if not tag_names:
        return ""

    try:
        custom_re = re.compile(r"{0}".format(conf.custom_git_re2), re.MULTILINE)
    except re.error as err:
        # BUGFIX: the original assigned `re.Pattern` (the class) as a
        # placeholder, printed `.pattern` off that class here, and then kept
        # running with an uncompiled regex, crashing below. Report the
        # offending pattern string and bail out instead.
        print_fatal(f"Custom git regex: {conf.custom_git_re2}")
        print_fatal(f"Unable to create custom git regex: {err}")
        return ""

    candidates = []
    for tag in tag_names:
        match = custom_re.search(tag)
        if not match:
            continue
        # BUGFIX: reset per match; the original carried `outputVersionPre`
        # over from the previous iteration (or raised UnboundLocalError on
        # the first) when group(1) was empty.
        version = ""
        if match.group(1):
            version = f"{match.group(1)}"
        if match.group(2):
            version = f"{version}.{match.group(2)}"
        if match.group(3):
            version = f"{version}.{match.group(3)}"
        if match.group(4):
            version = f"{version}.{match.group(4)}"
        if match.group(5):
            version = f"{version}{match.group(5)}"
        candidates.append(version)

    if not candidates:
        return ""
    # '~' sorts before alphanumerics, making dotted releases order sensibly.
    ordered = natsort.natsorted(candidates, key=lambda x: x.replace('.', '~') + 'z')
    return ordered[-1]
Beispiel #7
0
def remove_clone_archive(path, clone_path, is_fatal):
    """Remove temporary clone_archive git folder."""
    try:
        call(f"rm -rf {clone_path}", cwd=path)
    except subprocess.CalledProcessError as err:
        # Only surface the failure when the caller marked it fatal.
        if not is_fatal:
            return
        print_fatal("Unable to remove {}: {}".format(clone_path, err))
Beispiel #8
0
def package(filemanager):
    """Build the SRPM and binary RPMs for the package via mock.

    Bumps the global build round counter, recreates the results directory,
    builds the SRPM from the spec file in `download_path`, then builds the
    binary RPMs from that SRPM. Exits when mock leaves no build log behind;
    otherwise forwards the log and return code to parse_build_results.
    """
    global round
    round = round + 1
    set_mock()
    print("Building package " + tarball.name + " round", round)
    # call(mock_cmd + " -q -r clear --scrub=cache")
    # call(mock_cmd + " -q -r clear --scrub=all")
    # Start each round from an empty results directory.
    shutil.rmtree('{}/results'.format(download_path), ignore_errors=True)
    os.makedirs('{}/results'.format(download_path))
    util.call(
        mock_cmd +
        " -r clear --buildsrpm --sources=./ --spec={0}.spec --uniqueext={0} --result=results/ --no-cleanup-after"
        .format(tarball.name),
        logfile="%s/mock_srpm.log" % download_path,
        cwd=download_path)
    # Remove the SRPM-stage build log so the later existence check below
    # refers unambiguously to the binary build's log.
    util.call("rm -f results/build.log", cwd=download_path)
    srcrpm = "results/%s-%s-%s.src.rpm" % (tarball.name, tarball.version,
                                           tarball.release)
    returncode = util.call(
        mock_cmd +
        " -r clear  --result=results/ %s --enable-plugin=ccache  --uniqueext=%s --no-cleanup-after"
        % (srcrpm, tarball.name),
        logfile="%s/mock_build.log" % download_path,
        check=False,
        cwd=download_path)
    # sanity check the build log
    if not os.path.exists(download_path + "/results/build.log"):
        util.print_fatal(
            "Mock command failed, results log does not exist. User may not have correct permissions."
        )
        exit(1)

    parse_build_results(download_path + "/results/build.log", returncode,
                        filemanager)
Beispiel #9
0
def scan_for_licenses(srcdir):
    """
    Scan the project directory for things we can use to guess a description
    and summary
    """
    known_names = {
        "copyright", "copyright.txt", "apache-2.0", "libcurllicense",
        "gpl.txt", "gplv2.txt", "notice", "copyrights", "about_bsd.txt",
    }
    # Candidates start with copying or licen[cs]e (spelling errors)
    # or end with licen[cs]e.
    name_re = re.compile(r"^((copying)|(licen[cs]e))|(licen[cs]e)$")
    for dirpath, _subdirs, filenames in os.walk(srcdir):
        for fname in filenames:
            lowered = fname.lower()
            if lowered in known_names or name_re.search(lowered):
                license_from_copying_hash(os.path.join(dirpath, fname), srcdir)

    if not licenses:
        print_fatal(" Cannot find any license or {}.license file!\n".format(tarball.name))
        sys.exit(1)

    print("Licenses    : ", " ".join(sorted(licenses)))
Beispiel #10
0
def build_untar(tarball_path):
    """Determine extract command and tarball prefix from tar -tf output.

    Returns (extract_cmd, tar_prefix). Exits fatally when the tarball
    cannot be listed or when no directory prefix can be determined.
    """
    tar_prefix = ""
    try:
        tarball_contents = subprocess.check_output(
            ["tar", "-tf", tarball_path], universal_newlines=True).split("\n")
    except subprocess.CalledProcessError as cpe:
        # Show what `file` thinks the archive is to help diagnose the failure.
        file_type = subprocess.check_output(["file", tarball_path
                                             ]).decode("utf-8").strip()
        print_fatal(
            "tarball inspection failed, unable to determine tarball contents:\n"
            "{}\n{}\n".format(file_type, cpe))
        exit(1)

    extract_cmd = "tar --directory={0} -xf {1}".format(build.base_path,
                                                       tarball_path)
    for line in tarball_contents:
        # sometimes a leading ./ is prepended to the line, this is not the
        # prefix. BUGFIX: the original used line.lstrip("./"), which strips
        # ANY leading run of '.' and '/' characters (e.g. "..data/x" ->
        # "data/x"); remove only the literal "./" prefix instead.
        while line.startswith("./"):
            line = line[2:]
        # skip this line, it does not contain the prefix or is not a directory
        if not line or "/" not in line:
            continue

        tar_prefix = line.split("/")[0]
        if tar_prefix:
            break

    if not tar_prefix:
        print_fatal("malformed tarball, unable to determine tarball prefix")
        exit(1)

    return extract_cmd, tar_prefix
Beispiel #11
0
def build_unzip(zip_path):
    """Return correct unzip command and the prefix folder name of the contents of zip file.

    Inspects the zip's name list to find the common root folder of its
    contents; when there is none, extraction targets a directory named
    after the zip file itself.
    """
    if not zipfile.is_zipfile(zip_path):
        print_fatal("Not a valid zip file.")
        exit(1)

    with zipfile.ZipFile(zip_path, 'r') as archive:
        names = archive.namelist()
        if len(names) == 0:
            print_fatal("Zip file doesn't appear to have any content")
            exit(1)
        prefix = os.path.commonpath(names)

    if prefix:
        extract_cmd = "unzip -qq -d {0} {1}".format(build.base_path, zip_path)
    else:
        # No common prefix: make a dir based on the zip filename.
        subdir = os.path.splitext(os.path.basename(zip_path))[0]
        extract_cmd = "unzip -qq -d {0} {1}".format(
            os.path.join(build.base_path, subdir), zip_path)

    return extract_cmd, prefix
Beispiel #12
0
def purge_tree(tree):
    """Delete the directory tree at `tree`; silently no-op when absent."""
    if os.path.exists(tree):
        try:
            shutil.rmtree(tree)
        except Exception as e:
            # A half-removed tree is unrecoverable for the build.
            util.print_fatal("Cannot remove tree: {}".format(e))
            sys.exit(1)
Beispiel #13
0
def purge_tree(tree):
    """Recursively remove `tree`, exiting fatally if removal fails."""
    if not os.path.exists(tree):
        # Nothing to clean up.
        return
    try:
        shutil.rmtree(tree)
    except Exception as exc:
        util.print_fatal("Cannot remove tree: {}".format(exc))
        sys.exit(1)
Beispiel #14
0
def logcheck(pkg_loc):
    """Try to discover configuration options that were automatically switched off.

    Reads results/build.log under `pkg_loc`, compares each "checking ... no"
    configure line against the bundled configure_whitelist/configure_blacklist
    files, records misses via write_misses, and exits on a blacklisted miss.
    """
    log = os.path.join(pkg_loc, 'results', 'build.log')
    if not os.path.exists(log):
        print('build log is missing, unable to perform logcheck.')
        return

    # Misses listed in configure_whitelist are expected and ignored.
    whitelist = []
    file_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(file_dir, 'configure_whitelist')
    with open(file_path, "r") as whitelistf:
        for line in whitelistf:
            if line.startswith("#"):
                continue
            whitelist.append(line.rstrip())

    # Misses listed in configure_blacklist abort the run.
    blacklist = []
    file_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = os.path.join(file_dir, 'configure_blacklist')
    with open(file_path, "r") as blacklistf:
        for line in blacklistf:
            if line.startswith("#"):
                continue
            blacklist.append(line.rstrip())

    with open(log, 'r') as logf:
        lines = logf.readlines()

    # Lines like "checking for X... no" indicate a feature configure disabled.
    pat = re.compile(r"^checking (?:for )?(.*?)\.\.\. no")
    misses = []
    for line in lines:
        match = None
        m = pat.search(line)
        if m:
            match = m.group(1)

        # "none required" means the check succeeded without extra libraries,
        # so it is not a real miss even though the line ends in "no".
        if "none required" in line:
            match = None

        # Format-string warnings are always reported, using the whole line.
        if "warning: format not a string literal" in line:
            match = line

        if not match or match in whitelist:
            continue

        if match in blacklist:
            # Persist what was found so far before aborting.
            print_fatal("Blacklisted configure-miss is forbidden: " + match)
            misses.append("Blacklisted configure-miss is forbidden: " + match)
            write_misses(pkg_loc, misses)
            exit(1)

        print("Configure miss: " + match)
        misses.append("Configure miss: " + match)

    if not misses:
        return

    write_misses(pkg_loc, misses)
Beispiel #15
0
def package(filemanager, mockconfig, mockopts, cleanup=False):
    """Run main package build routine.

    Builds the SRPM and then the binary RPMs in mock, using a unique chroot
    extension so concurrent builds do not collide. When `cleanup` is True the
    mock chroot is cleaned up after each stage. Build results are parsed only
    when the buildroot log resolved all dependencies cleanly.
    """
    global round
    global uniqueext
    global success
    round = round + 1
    success = 0
    mock_cmd = get_mock_cmd()
    print("Building package " + tarball.name + " round", round)

    # determine uniqueext only once
    if cleanup:
        uniqueext = uniqueext or get_uniqueext("/var/lib/mock", "clear",
                                               tarball.name)
        cleanup_flag = "--cleanup-after"
    else:
        uniqueext = tarball.name
        cleanup_flag = "--no-cleanup-after"

    print("{} mock chroot at /var/lib/mock/clear-{}".format(
        tarball.name, uniqueext))

    # Only the first round starts from an empty results directory; later
    # rounds accumulate logs alongside earlier output.
    if round == 1:
        shutil.rmtree('{}/results'.format(download_path), ignore_errors=True)
        os.makedirs('{}/results'.format(download_path))

    util.call("{} -r {} --buildsrpm --sources=./ --spec={}.spec "
              "--uniqueext={} --result=results/ {} {}".format(
                  mock_cmd, mockconfig, tarball.name, uniqueext, cleanup_flag,
                  mockopts),
              logfile="%s/results/mock_srpm.log" % download_path,
              cwd=download_path)

    # back up srpm mock logs
    util.call("mv results/root.log results/srpm-root.log", cwd=download_path)
    util.call("mv results/build.log results/srpm-build.log", cwd=download_path)

    srcrpm = "results/%s-%s-%s.src.rpm" % (tarball.name, tarball.version,
                                           tarball.release)
    returncode = util.call("{} -r {} --result=results/ {} "
                           "--enable-plugin=ccache  --uniqueext={} {}".format(
                               mock_cmd, mockconfig, srcrpm, uniqueext,
                               cleanup_flag),
                           logfile="%s/results/mock_build.log" % download_path,
                           check=False,
                           cwd=download_path)
    # sanity check the build log
    if not os.path.exists(download_path + "/results/build.log"):
        util.print_fatal(
            "Mock command failed, results log does not exist. User may not have correct permissions."
        )
        exit(1)

    # Skip result parsing when the buildroot had unresolvable dependencies.
    is_clean = parse_buildroot_log(download_path + "/results/root.log",
                                   returncode)
    if is_clean:
        parse_build_results(download_path + "/results/build.log", returncode,
                            filemanager)
Beispiel #16
0
def build_un7z(zip_path):
    """Return correct 7z command and the prefix folder name of the contents of 7z file.

    Parses the output of `7z l`, whose file listing follows a separator row
    made of '-' characters:

        Date         Time    Attr         Size   Compressed  Name
        ------------------- ----- ------------ ------------  ------------------------
        2018-05-15 05:50:54 ....A        25095      7931928  prefix-dir/sub_dir1/subdir2

    The 'prefix-dir' component of the first entry after the separator is the
    returned prefix.
    """
    listing = subprocess.check_output(["7z", "l", zip_path],
                                      universal_newlines=True)
    rows = listing.splitlines() if listing else []
    prefix = ""
    in_entries = False
    for row in rows:
        if in_entries:
            # First row after the separator is an archive entry; use it.
            prefix = row.split("/")[0].split()[-1]
            break
        if row.startswith('----------'):
            # Separator between the column header and the entries.
            in_entries = True

    if not in_entries:
        print_fatal("Zip file doesn't appear to have any content")
        exit(1)

    if not prefix:
        print_fatal("Malformed zipfile, unable to determine zipfile prefix")
        exit(1)

    extract_cmd = "7z x -o{0} {1}".format(build.base_path, zip_path)
    return extract_cmd, prefix
Beispiel #17
0
def check_requirements(use_git):
    """Ensure all requirements are satisfied before continuing."""
    needed = ["mock", "rpm2cpio", "nm", "objdump", "cpio", "readelf"]
    if use_git:
        needed = needed + ["git"]

    # Anything we cannot find on PATH is a hard failure.
    missing = [binary for binary in needed if not binary_in_path(binary)]
    if missing:
        print_fatal("Required programs are not installed: {}".format(", ".join(missing)))
        sys.exit(1)
Beispiel #18
0
def license_from_copying_hash(copying, srcdir):
    """Add licenses based on the hash of the copying file.

    Skips script files. When config.license_fetch is set, POSTs the file
    text and its SHA-1 to that service and records the license names it
    returns; otherwise falls back to the local config.license_hashes table.
    Unknown hashes are reported via config.license_show when configured.
    """
    data = tarball.get_contents(copying)
    if data.startswith(b'#!'):
        # Not a license if this is a script
        return

    sh = hashlib.sha1()
    sh.update(data)
    hash_sum = sh.hexdigest()

    if config.license_fetch:
        # Ask the license server to identify the text by content and hash.
        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')

        buffer = BytesIO()
        c = pycurl.Curl()
        c.setopt(c.URL, config.license_fetch)
        c.setopt(c.WRITEDATA, buffer)
        c.setopt(c.POSTFIELDS, data)
        c.setopt(c.FOLLOWLOCATION, 1)
        try:
            c.perform()
        except Exception as excep:
            print_fatal("Failed to fetch license from {}: {}".format(
                config.license_fetch, excep))
            c.close()
            sys.exit(1)

        c.close()

        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License     : ", page, " (server) (", hash_sum, ")")
            process_licenses(page)

            if page != "none":
                # Record the license file path relative to the source root.
                lic_path = copying[len(srcdir) + 1:]
                license_files.append(shlex.quote(lic_path))

            return

    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum])
    else:
        if not config.license_show:
            return
        print_warning("Unknown license {0} with hash {1}".format(
            copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
Beispiel #19
0
def do_curl(url, dest=None, post=None, is_fatal=False):
    """
    Perform a curl operation for `url`.

    If `post` is set, a POST is performed for `url` with fields taken from the
    specified value. Otherwise a GET is performed for `url`. If `dest` is set,
    the curl response (if successful) is written to the specified path and the
    path is returned. Otherwise a successful response is returned as a BytesIO
    object. If `is_fatal` is `True` (`False` is the default), a GET failure,
    POST failure, or a failure to write to the path specified for `dest`
    results in the program exiting with an error. Otherwise, `None` is returned
    for any of those error conditions.
    """
    c = pycurl.Curl()
    c.setopt(c.URL, url)
    if post:
        c.setopt(c.POSTFIELDS, post)
    c.setopt(c.FOLLOWLOCATION, True)
    c.setopt(c.FAILONERROR, True)
    c.setopt(c.CONNECTTIMEOUT, 10)
    c.setopt(c.TIMEOUT, 600)
    c.setopt(c.LOW_SPEED_LIMIT, 1)
    c.setopt(c.LOW_SPEED_TIME, 10)
    buf = BytesIO()
    c.setopt(c.WRITEDATA, buf)
    try:
        c.perform()
    except pycurl.error as e:
        if is_fatal:
            print_fatal("Unable to fetch {}: {}".format(url, e))
            sys.exit(1)
        return None
    finally:
        c.close()

    # In-memory result when no destination path was requested.
    if not dest:
        return buf

    try:
        with open(dest, 'wb') as fp:
            fp.write(buf.getvalue())
    except IOError as e:
        # Never leave a partially-written file behind.
        if os.path.exists(dest):
            os.unlink(dest)
        if is_fatal:
            print_fatal("Unable to write to {}: {}".format(dest, e))
            sys.exit(1)
        return None
    return dest
Beispiel #20
0
def check_requirements(useGit):
    """ Ensure all requirements are satisfied before continuing """
    required_bins = ["mock", "rpm2cpio", "nm", "objdump", "cpio", "readelf"]
    if useGit:
        required_bins.append("git")

    # Honor the caller's PATH, with a sane fallback when it is unset.
    search_paths = os.getenv("PATH", default="/usr/bin:/bin").split(os.pathsep)
    missing = [prog for prog in required_bins
               if not binary_in_path(prog, search_paths)]
    if missing:
        print_fatal("Required programs are not installed: {}".format(", ".join(missing)))
        sys.exit(1)
Beispiel #21
0
 def set_zip_prefix(self):
     """Determine prefix folder name of zip file."""
     if not zipfile.is_zipfile(self.path):
         print_fatal("Not a valid zip file.")
         exit(1)
     with zipfile.ZipFile(self.path, 'r') as archive:
         names = archive.namelist()
         if len(names) == 0:
             # Empty archives are unusable.
             print_fatal("Zip file doesn't appear to have any content")
             exit(1)
         self.prefix = os.path.commonpath(names)
Beispiel #22
0
def examine_abi(download_path, name):
    """Proxy the ABI reporting to the right function."""
    download_path = os.path.abspath(download_path)
    results_dir = os.path.abspath(os.path.join(download_path, "results"))

    if not os.path.exists(results_dir):
        util.print_fatal("Results directory does not exist, aborting")
        sys.exit(1)

    if not util.binary_in_path("abireport"):
        # Fall back to the slow in-process scanner.
        util.print_warning("abireport is not installed. Using slow scanning")
        examine_abi_fallback(download_path, results_dir, name)
        return
    examine_abi_host(download_path, results_dir, name)
Beispiel #23
0
def link_new_rpms_here():
    """Invoke `make link-new-rpms-here`, exiting fatally on failure."""
    # Plain string: the original used an f-string with no placeholders.
    make_cmd = "make link-new-rpms-here"
    try:
        # Output is captured so a failure surfaces via CalledProcessError.
        # The unused `process` binding and the redundant universal_newlines
        # (alias of text=True) were dropped.
        subprocess.run(
            make_cmd,
            check=True,
            shell=True,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
        )
    except subprocess.CalledProcessError as err:
        print_fatal(f"Error: {make_cmd}: {err}")
        sys.exit(1)
Beispiel #24
0
 def write_python_flags_fix(self, mock_dir, content_name, config):
     """Patch python to use custom flags.

     Applies 0001-Fix-PYTHON-flags.patch (via sudo) to the chroot's
     python3.9 tree exactly once, using a 'patched' marker file for
     idempotence. On patch failure the .orig backups are restored and
     the program exits fatally.
     """
     python_dir_dst = f"{mock_dir}/clear-{content_name}/root/usr/lib/python3.9"
     # Marker file: its presence means the tree was already patched.
     python_dir_patched_file = f"{python_dir_dst}/patched"
     patch_file = "/aot/build/clearlinux/projects/autospec/autospec/0001-Fix-PYTHON-flags.patch"
     # --backup keeps .orig copies, enabling the rollback path below.
     patch_cmd = f"sudo /usr/bin/patch --backup -p1 --fuzz=2 --input={patch_file}"
     if not os.path.isfile(python_dir_patched_file):
         try:
             process = subprocess.run(
                 patch_cmd,
                 check=True,
                 shell=True,
                 stdout=subprocess.PIPE,
                 stderr=subprocess.STDOUT,
                 text=True,
                 universal_newlines=True,
                 cwd=python_dir_dst,
             )
         except subprocess.CalledProcessError as err:
             # Roll back: copy every .orig backup over its patched original.
             revert_patch = [
                 (f.path, f.path.replace(".orig", ""))
                 for f in scantree(python_dir_dst) if f.is_file()
                 and os.path.splitext(f.name)[1].lower() == ".orig"
             ]
             for pcs in revert_patch:
                 process = subprocess.run(
                     f"sudo cp {pcs[0]} {pcs[1]}",
                     check=False,
                     shell=True,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT,
                     text=True,
                     universal_newlines=True,
                     cwd=python_dir_dst,
                 )
             print_fatal(
                 f"Unable to patch custom flags in {python_dir_dst}: {err}")
             sys.exit(1)
         # Write the marker so subsequent runs skip the patch step.
         process = subprocess.run(
             f"echo patched | sudo tee patched",
             check=False,
             shell=True,
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             text=True,
             universal_newlines=True,
             cwd=python_dir_dst,
         )
Beispiel #25
0
def really_download(upstream_url, destination):
    """
    Ok, really download the tarball from url
    """
    with open(destination, 'wb') as dfile:
        curl = pycurl.Curl()
        curl.setopt(curl.URL, upstream_url)
        # Stream straight into the destination file.
        curl.setopt(curl.WRITEDATA, dfile)
        curl.setopt(curl.FOLLOWLOCATION, True)
        try:
            curl.perform()
        except pycurl.error:
            print_fatal(f"unable to download {upstream_url}")
            exit(1)
        finally:
            curl.close()
Beispiel #26
0
 def parse_buildroot_log(self, filename, returncode):
     """Handle buildroot log contents.

     Returns True when the build succeeded or the log shows no unresolvable
     dependencies; False when any 'No matching package to install' line is
     found.
     """
     if returncode == 0:
         return True
     self.must_restart = 0
     util.call("sync")
     with util.open_auto(filename, "r") as rootlog:
         log_lines = rootlog.readlines()
     missing_re = re.compile(r"^.*No matching package to install: '(.*)'$")
     clean = True
     for log_line in log_lines:
         hit = missing_re.match(log_line)
         if hit is not None:
             util.print_fatal("Cannot resolve dependency name: {}".format(hit.group(1)))
             clean = False
     return clean
Beispiel #27
0
 def set_tar_prefix(self):
     """Determine prefix folder name of tar file."""
     if not tarfile.is_tarfile(self.path):
         print_fatal("Not a valid tar file.")
         exit(1)
     with tarfile.open(self.path, 'r') as archive:
         names = archive.getnames()
         if len(names) == 0:
             # Empty archives are unusable.
             print_fatal("Tar file doesn't appear to have any content")
             exit(1)
         if len(names) > 1:
             # phpize packages ship a top-level package.xml that would
             # break the common-path computation; drop it first.
             if 'package.xml' in names and self.pattern in ['phpize']:
                 names.remove('package.xml')
             self.prefix = os.path.commonpath(names)
Beispiel #28
0
def license_from_copying_hash(copying, srcdir):
    """Add licenses based on the hash of the copying file"""
    hash_sum = tarball.get_sha1sum(copying)

    if config.license_fetch:
        # POST the file text plus its hash to the license server
        with open(copying, "r", encoding="latin-1") as lic_file:
            lic_text = lic_file.read()

        post_body = urllib.parse.urlencode(
            {'hash': hash_sum, 'text': lic_text, 'package': tarball.name}
        ).encode('utf-8')

        buffer = BytesIO()
        curl = pycurl.Curl()
        curl.setopt(curl.URL, config.license_fetch)
        curl.setopt(curl.WRITEDATA, buffer)
        curl.setopt(curl.POSTFIELDS, post_body)
        curl.setopt(curl.FOLLOWLOCATION, 1)
        try:
            curl.perform()
        except Exception as excep:
            print_fatal("Failed to fetch license from {}: {}"
                        .format(config.license_fetch, excep))
            curl.close()
            sys.exit(1)
        curl.close()

        page = buffer.getvalue().decode('utf-8').strip()
        if page:
            print("License     : ", page, " (server) (", hash_sum, ")")
            add_license(page)
            if page != "none":
                # record the copying file path relative to the source dir
                license_files.append(copying[len(srcdir) + 1:])
            return

    # fall back to the locally known hash table
    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum])
        return

    if not config.license_show:
        return
    print_warning("Unknown license {0} with hash {1}".format(copying, hash_sum))
    hash_url = config.license_show % {'HASH': hash_sum}
    print_warning("Visit {0} to enter".format(hash_url))
Beispiel #29
0
def examine_abi_host(download_path, results_dir, name):
    """Make use of the hostside abireport tool."""
    name_len = len(name)
    rpms = set()
    for entry in os.listdir(results_dir):
        # skip "-extras-" subpackages (matched after the package name prefix)
        if entry.find("-extras-", name_len) >= name_len:
            continue
        # keep binary rpms only, never the source rpm
        if entry.endswith(".rpm") and not entry.endswith(".src.rpm"):
            rpms.add("{}/{}".format(results_dir, entry))

    if not rpms:
        util.print_fatal("No usable rpms found, aborting")
        sys.exit(1)

    try:
        util.call("abireport scan-packages {}".format(" ".join(rpms)),
                  cwd=download_path)
    except Exception as e:
        util.print_fatal("Error invoking abireport: {}".format(e))
Beispiel #30
0
def git_describe_custom_re(clone_path, conf):
    """Derive a version string from ``git describe`` using a custom regex.

    Runs ``git describe --abbrev=0 --tags`` inside clone_path and applies
    the regular expression in ``conf.custom_git_re2`` to the output.
    Capture groups 1-4 (when present and non-empty) are joined with dots;
    group 5 (when present) is appended verbatim as a suffix.

    Returns the assembled version string, or "" when the describe output
    is empty, the regex fails to compile, or nothing matches.
    """
    process = subprocess.run(
        "git describe --abbrev=0 --tags",
        check=False,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        cwd=clone_path,
    )
    describe_output = process.stdout
    if not describe_output:
        return ""

    try:
        version_re = re.compile(conf.custom_git_re2, re.MULTILINE)
    except re.error as err:
        # Report the *source* regex; the old code printed a placeholder
        # (`re.Pattern.pattern`) and then crashed calling .search on the
        # re.Pattern class itself.
        print_fatal(f"Custom git regex: {conf.custom_git_re2}")
        print_fatal(f"Unable to create custom git regex: {err}")
        return ""
    print_info(f"Custom git regex 2: {version_re.pattern}")

    match = version_re.search(describe_output)
    if not match:
        return ""

    # groups() pads missing groups with None, so regexes with fewer than
    # five groups no longer raise IndexError as the old per-group code did.
    groups = match.groups()
    version = ".".join(g for g in groups[:4] if g)
    if len(groups) > 4 and groups[4]:
        version = f"{version}{groups[4]}"
    return version
Beispiel #31
0
def check_url_content(url):
    """
    Check that the url to the infile file is raw or in plaintext.

    Issues a HEAD request and inspects the content-type header.  For an
    html response, a github url is rewritten to its raw.githubusercontent
    form and a git /tree/ url to its /plain/ form; any other html url is
    rejected with a fatal message and None is returned.  Non-html urls
    are returned unchanged.
    """
    content_type = requests.head(url).headers['content-type']
    if "text/html" not in content_type:
        return url
    if re.match(r'^(http)s?://github.com(.*)', url):
        # github web view -> raw file contents
        return url.replace("github",
                           "raw.githubusercontent").replace("blob/", "")
    if 'git' in url and "/tree/" in url:
        # cgit-style web view -> plaintext view
        return url.replace("/tree/", "/plain/", 1)
    util.print_fatal("infile url has an html content-type, "
                     "please use plaintext.")
    return None
Beispiel #32
0
def git_clone(url, path, cmd_args, clone_path, force_module, force_fullclone, is_fatal=True):
    """Clone a git repository, selecting depth and submodule handling.

    force_module   - when True, clone without submodules.
    force_fullclone - when True, clone full history; otherwise --depth=1.
    cmd_args       - branch name plus trailing clone arguments (url/dest).

    On a clone failure, when is_fatal, the partial clone archive is
    removed, the error is reported, and the program exits with status 1.
    """
    if force_module:
        if force_fullclone:
            clone_cmd = f"git clone -j8 --branch={cmd_args}"
        else:
            clone_cmd = f"git clone --depth=1 --single-branch -j8 --branch={cmd_args}"
    else:
        if force_fullclone:
            clone_cmd = f"git clone --recurse-submodules -j8 --branch={cmd_args}"
        else:
            clone_cmd = f"git clone --depth=1 --single-branch --shallow-submodules --recurse-submodules -j8 --branch={cmd_args}"
    try:
        # Log exactly the command being run; the old code's shallow-clone
        # messages omitted --depth=1 and so misreported what executed.
        print_info(clone_cmd)
        call(clone_cmd, cwd=path)
    except subprocess.CalledProcessError as err:
        if is_fatal:
            remove_clone_archive(path, clone_path, is_fatal)
            print_fatal(f"Unable to clone {url} in {clone_path}: {err}")
            sys.exit(1)
Beispiel #33
0
def examine_abi(download_path):
    """Extract the built rpms and write ABI reports into download_path.

    Produces two files:
      * "symbols"   - one "soname:symbol" line per exported symbol
      * "used_libs" - one soname per library the binaries depend on
    A report with no content is truncated instead of (re)written.
    The extraction tree is created under download_path/__extraction and
    purged again before returning; the caller's cwd is restored.
    """
    download_path = os.path.abspath(download_path)
    results_dir = os.path.abspath(os.path.join(download_path, "results"))

    if not os.path.exists(results_dir):
        util.print_fatal("Results directory does not exist, aborting")
        sys.exit(1)

    old_dir = os.getcwd()

    # Binary (non-source) rpms produced by the build
    rpms = {os.path.basename(item) for item in os.listdir(results_dir)
            if item.endswith(".rpm") and not item.endswith(".src.rpm")}

    if not rpms:
        util.print_fatal("No usable rpms found, aborting")
        sys.exit(1)

    extract_dir = os.path.abspath(os.path.join(download_path, "__extraction"))
    purge_tree(extract_dir)

    try:
        os.makedirs(extract_dir)
    except Exception as e:
        util.print_fatal("Cannot create extraction tree: {}".format(e))
        sys.exit(1)

    os.chdir(extract_dir)

    # Extract all those rpms to our current directory
    try:
        for rpm in rpms:
            cmd = "rpm2cpio \"{}\" | cpio -imd 2>/dev/null".format(os.path.join(results_dir, rpm))
            subprocess.check_call(cmd, shell=True)
    except Exception as e:
        util.print_fatal("Error extracting RPMS: {}".format(e))

    os.chdir(download_path)
    collected_files = set()

    # Places we expect to find shared libraries
    for check_path in valid_dirs:
        # valid_dirs entries may be absolute; make them relative to the tree
        dirn = os.path.join(extract_dir, check_path.lstrip('/'))
        if not os.path.isdir(dirn):
            continue
        for entry in os.listdir(dirn):
            clean_path = os.path.abspath(os.path.join(dirn, os.path.basename(entry)))
            if is_file_valid(clean_path):
                collected_files.add(clean_path)

    abi_report = {}

    # Now examine these libraries
    for library in sorted(collected_files):
        soname = get_soname(library)
        if not soname:
            util.print_warning("Failed to determine soname of: {}".format(library))
            soname = os.path.basename(library)
        symbols = dump_symbols(library)
        if symbols:
            abi_report.setdefault(soname, set()).update(symbols)

    report_file = os.path.join(download_path, "symbols")

    if abi_report:
        # "with" guarantees the handle is closed even if a write fails
        # (the old code leaked the handle on error).
        with open(report_file, "w", encoding="utf-8") as report:
            for soname in sorted(abi_report):
                for symbol in sorted(abi_report[soname]):
                    report.write("{}:{}\n".format(soname, symbol))
    else:
        truncate_file(report_file)

    # Write the library report
    lib_deps = get_all_dependencies(extract_dir)
    report_file = os.path.join(download_path, "used_libs")
    if lib_deps:
        with open(report_file, "w", encoding="utf-8") as report:
            for soname in sorted(lib_deps):
                report.write("{}\n".format(soname))
    else:
        truncate_file(report_file)

    os.chdir(old_dir)
    purge_tree(extract_dir)