def test_set_build_pattern(self):
    """Test set_build_pattern with sufficient pattern strength."""
    buildpattern.set_build_pattern("configure_ac", 1)
    # A strength of 1 beats the module's initial strength, so both the
    # pattern and the recorded strength are expected to update.
    self.assertEqual("configure_ac", buildpattern.default_pattern)
    self.assertEqual(1, buildpattern.pattern_strength)
def parse_configure_ac(filename):
    """Parse the configure.ac file for build requirements.

    Reads the file one character at a time, tracking parenthesis depth so
    that a macro invocation spanning several physical lines is joined into
    one logical line before being handed to configure_ac_line().
    """
    buf = ""
    depth = 0
    # print("Configure parse: ", filename)
    buildpattern.set_build_pattern("configure_ac", 1)
    # Use a context manager so the handle is closed even if
    # configure_ac_line() raises (the original leaked it in that case).
    with open(filename, "r", encoding="latin-1") as f:
        while True:
            c = f.read(1)
            if not c:
                break
            if c == "(":
                depth += 1
            if c == ")" and depth > 0:
                depth -= 1
            if c != "\n":
                buf += c
            if c == "\n" and depth == 0:
                configure_ac_line(buf)
                buf = ""
    # Flush whatever is left when the file does not end in a newline.
    configure_ac_line(buf)
def detect_build_from_url(url):
    """
    Detect build patterns and build requirements from the patterns
    detected in the url.
    """
    # Each rule: (url substrings to look for, build pattern to set,
    # extra build requirements to add). All rules are evaluated — the
    # original used independent if-statements, not a chain.
    rules = (
        # R package
        (("cran.r-project.org", "cran.rstudio.com"), "R", ("clr-R-helpers",)),
        # python
        (("pypi.python.org", "pypi.debian.net"), "distutils23",
         ("python3-dev", "python-dev")),
        # cpan
        ((".cpan.org/", ".metacpan.org/"), "cpan", ()),
        # ruby
        (("rubygems.org/",), "ruby", ()),
        # maven
        ((".maven.",), "maven", ()),
    )
    for markers, pattern, extra_reqs in rules:
        if any(marker in url for marker in markers):
            buildpattern.set_build_pattern(pattern, 10)
            for req in extra_reqs:
                buildreq.add_buildreq(req)
def test_set_build_pattern_low_strength(self):
    """Test set_build_pattern with low pattern strength.

    Nothing in the module should change.
    """
    buildpattern.pattern_strength = 2
    buildpattern.set_build_pattern("configure_ac", 1)
    # The weaker candidate (1 < 2) must not displace the current pattern.
    self.assertEqual("make", buildpattern.default_pattern)
    self.assertEqual(2, buildpattern.pattern_strength)
def parse_cargo_toml(filename):
    """Update build requirements using Cargo.toml

    Set the build requirements for building rust programs using cargo.
    """
    global cargo_bin
    buildpattern.set_build_pattern("cargo", 1)
    add_buildreq("rustc")
    with open(filename, "r", encoding="latin-1") as ctoml:
        cargo = toml.loads(ctoml.read())
    # A [bin] section or a src/main.rs next to Cargo.toml means this
    # crate produces an executable.
    main_rs = os.path.join(os.path.dirname(filename), "src/main.rs")
    if cargo.get("bin") or os.path.exists(main_rs):
        cargo_bin = True
    deps = cargo.get("dependencies")
    if not deps:
        return
    for dep in deps:
        add_buildreq(dep)
def parse_cargo_toml(self, filename, packages):
    """Update build requirements using Cargo.toml.

    Set the build requirements for building rust programs using cargo.
    """
    buildpattern.set_build_pattern("cargo", 1)
    self.add_buildreq("rustc")
    with util.open_auto(filename, "r") as ctoml:
        cargo = toml.loads(ctoml.read())
    # A [bin] section or a src/main.rs beside Cargo.toml marks an
    # executable crate.
    main_rs = os.path.join(os.path.dirname(filename), "src/main.rs")
    if cargo.get("bin") or os.path.exists(main_rs):
        self.cargo_bin = True
    deps = cargo.get("dependencies")
    if not deps:
        return
    for dep in deps:
        # Only record a runtime requirement for dependencies that were
        # accepted as build requirements.
        if self.add_buildreq(dep):
            self.add_requires(dep, packages)
def parse_configure_ac(filename):
    """Parse the configure.ac file for build requirements.

    Characters are consumed one at a time while tracking parenthesis
    depth, so that multi-line macro invocations are concatenated into a
    single logical line before configure_ac_line() processes them.
    """
    buf = ""
    depth = 0
    # print("Configure parse: ", filename)
    buildpattern.set_build_pattern("configure_ac", 1)
    # with-statement guarantees the handle is closed even if
    # configure_ac_line() raises (the original leaked it in that case).
    with open(filename, "r", encoding="latin-1") as f:
        while True:
            c = f.read(1)
            if not c:  # EOF
                break
            if c == "(":
                depth += 1
            if c == ")" and depth > 0:
                depth -= 1
            if c != "\n":
                buf += c
            if c == "\n" and depth == 0:
                configure_ac_line(buf)
                buf = ""
    # Flush a trailing partial line (file not ending in newline).
    configure_ac_line(buf)
def parse_configure_ac(self, filename, conf32):
    """Parse the configure.ac file for build requirements.

    Reads one character at a time, tracking parenthesis depth so a macro
    call spanning several physical lines is delivered to
    configure_ac_line() as one logical line.
    """
    buf = ""
    depth = 0
    # print("Configure parse: ", filename)
    buildpattern.set_build_pattern("configure_ac", 1)
    # Use a context manager so the handle is closed even when
    # configure_ac_line() raises (the original leaked it in that case).
    with util.open_auto(filename, "r") as f:
        while True:
            c = f.read(1)
            if not c:
                break
            if c == "(":
                depth += 1
            if c == ")" and depth > 0:
                depth -= 1
            if c != "\n":
                buf += c
            if c == "\n" and depth == 0:
                self.configure_ac_line(buf, conf32)
                buf = ""
    # Flush a trailing partial line (file not ending in newline).
    self.configure_ac_line(buf, conf32)
def detect_build_from_url(url):
    """Detect build patterns and build requirements from the patterns detected in the url."""
    # Pattern name -> url substrings that indicate it. All entries are
    # checked (the original used independent if-statements); dict order
    # preserves the original evaluation order.
    url_markers = {
        "R": ("cran.r-project.org", "cran.rstudio.com"),        # R package
        "distutils3": ("pypi.python.org", "pypi.debian.net"),   # python
        "cpan": (".cpan.org/", ".metacpan.org/"),               # cpan
        "ruby": ("rubygems.org/",),                             # ruby
        "maven": (".maven.",),                                  # maven
        "cargo": ("crates.io",),                                # rust crate
        "godep": ("proxy.golang.org",),                         # go dependency
    }
    for pattern, markers in url_markers.items():
        if any(marker in url for marker in markers):
            buildpattern.set_build_pattern(pattern, 10)
def scan_for_configure(dirn):
    """Scan the package directory for build files to determine build pattern.

    First adds the buildreq bundle matching the already-detected default
    pattern, then walks the source tree looking for build-system files
    (go.mod, CMakeLists.txt, setup.py, Cargo.toml, ...), adjusting the
    build pattern and build requirements as it goes, and finally prints
    the accumulated build requirements.
    """
    # Bundle to add for each pre-detected default pattern.
    if buildpattern.default_pattern == "distutils":
        add_buildreq("buildreq-distutils")
    elif buildpattern.default_pattern == "distutils36":
        add_buildreq("buildreq-distutils36")
    elif buildpattern.default_pattern == "distutils23":
        add_buildreq("buildreq-distutils23")
    elif buildpattern.default_pattern == "distutils3":
        add_buildreq("buildreq-distutils3")
    elif buildpattern.default_pattern == "golang":
        add_buildreq("buildreq-golang")
    elif buildpattern.default_pattern == "cmake":
        add_buildreq("buildreq-cmake")
    elif buildpattern.default_pattern == "configure":
        add_buildreq("buildreq-configure")
    elif buildpattern.default_pattern == "qmake":
        add_buildreq("buildreq-qmake")
    elif buildpattern.default_pattern == "cpan":
        add_buildreq("buildreq-cpan")
    elif buildpattern.default_pattern == "scons":
        add_buildreq("buildreq-scons")
    elif buildpattern.default_pattern == "R":
        add_buildreq("buildreq-R")
        parse_r_description(os.path.join(dirn, "DESCRIPTION"))
    elif buildpattern.default_pattern == "phpize":
        add_buildreq("buildreq-php")
    count = 0
    for dirpath, _, files in os.walk(dirn):
        # Files in the package root are stronger evidence than files in
        # subdirectories.
        default_score = 2 if dirpath == dirn else 1
        if any(f.endswith(".go") for f in files):
            add_buildreq("buildreq-golang")
            buildpattern.set_build_pattern("golang", default_score)
        if "go.mod" in files:
            if "Makefile" not in files:
                # Go packages usually have make build systems so far
                # so only use go directly if we can't find a Makefile
                buildpattern.set_build_pattern("golang", default_score)
            add_buildreq("buildreq-golang")
            config.set_gopath = False
            mod_path = os.path.join(dirpath, "go.mod")
            reqs = parse_go_mod(mod_path)
            for req in reqs:
                # req[0] is a SCM url segment in the form, repo/XXX/dependency-name
                # req[1] is the version of the dependency
                pkg = "go-" + req[0].replace("/", "-")
                add_buildreq(pkg)
                if buildpattern.default_pattern == "godep":
                    add_requires(pkg)
        if "CMakeLists.txt" in files and "configure.ac" not in files:
            add_buildreq("buildreq-cmake")
            buildpattern.set_build_pattern("cmake", default_score)
            srcdir = os.path.abspath(os.path.join(dirn, "clr-build", config.cmake_srcdir or ".."))
            if os.path.samefile(dirpath, srcdir):
                parse_catkin_deps(os.path.join(srcdir, "CMakeLists.txt"))
        if "configure" in files and os.access(dirpath + '/configure', os.X_OK):
            buildpattern.set_build_pattern("configure", default_score)
        elif any(is_qmake_pro(f) for f in files):
            add_buildreq("buildreq-qmake")
            buildpattern.set_build_pattern("qmake", default_score)
        if "requires.txt" in files:
            grab_python_requirements(dirpath + '/requires.txt')
        if "setup.py" in files:
            add_buildreq("buildreq-distutils3")
            add_setup_py_requires(dirpath + '/setup.py')
            python_pattern = get_python_build_version_from_classifier(dirpath + '/setup.py')
            buildpattern.set_build_pattern(python_pattern, default_score)
        if "Makefile.PL" in files or "Build.PL" in files:
            buildpattern.set_build_pattern("cpan", default_score)
            add_buildreq("buildreq-cpan")
        if "SConstruct" in files:
            add_buildreq("buildreq-scons")
            buildpattern.set_build_pattern("scons", default_score)
        if "requirements.txt" in files:
            grab_python_requirements(dirpath + '/requirements.txt')
        if "meson.build" in files:
            add_buildreq("buildreq-meson")
            buildpattern.set_build_pattern("meson", default_score)
        if "config.m4" in files:
            add_buildreq("buildreq-php")
            buildpattern.set_build_pattern("phpize", 1)
        if "pom.xml" in files:
            # Pretty straightforward maven source package
            add_buildreq("apache-maven")
            add_buildreq("buildreq-mvn")
            buildpattern.set_build_pattern("maven", default_score)
        if "build.xml" in files:
            add_buildreq("apache-ant")
            add_buildreq("buildreq-mvn")
            buildpattern.set_build_pattern("ant", default_score)
            # But wait, this might use maven!
            # FIX: os.walk yields bare filenames, so the original test
            # f.endswith('/build.xml') could never match (dead code) and
            # open(f) used a cwd-relative path without closing the handle.
            # Compare the basename directly and open under dirpath.
            for f in files:
                if f == "build.xml":
                    with open(os.path.join(dirpath, f)) as bxml:
                        for line in bxml:
                            if "<artifact:mvn>" in line:
                                buildpattern.set_build_pattern("maven", default_score)
                                break
        if "build.gradle" in files:
            add_buildreq("gradle")
            add_buildreq("buildreq-mvn")
            buildpattern.set_build_pattern("gradle", default_score)
        for name in files:
            if name.lower() == "cargo.toml" and dirpath == dirn:
                parse_cargo_toml(os.path.join(dirpath, name))
            if name.lower().startswith("configure."):
                parse_configure_ac(os.path.join(dirpath, name))
            if name.lower().startswith("rakefile") and buildpattern.default_pattern == "ruby":
                rakefile(os.path.join(dirpath, name))
            if name.endswith(".pro") and buildpattern.default_pattern == "qmake":
                qmake_profile(os.path.join(dirpath, name))
            if name.lower() == "makefile":
                buildpattern.set_build_pattern("make", default_score)
            if name.lower() == "autogen.sh":
                buildpattern.set_build_pattern("autogen", default_score)
            if name.lower() == "cmakelists.txt":
                buildpattern.set_build_pattern("cmake", default_score)
            if (name.lower() == "cmakelists.txt" or name.endswith(".cmake")) \
                    and buildpattern.default_pattern == "cmake":
                parse_cmake(os.path.join(dirpath, name))
    # If patches touch configure.*, an autoreconf stage is needed.
    can_reconf = os.path.exists(os.path.join(dirn, "configure.ac"))
    if not can_reconf:
        can_reconf = os.path.exists(os.path.join(dirn, "configure.in"))
    if can_reconf and config.autoreconf:
        print("Patches touch configure.*, adding autoreconf stage")
        for breq in autoreconf_reqs:
            add_buildreq(breq)
    else:
        config.autoreconf = False
    # Print the collected build requirements, five per line.
    print("Buildreqs : ", end="")
    for lic in sorted(buildreqs):
        if count > 4:
            count = 0
            print("\nBuildreqs : ", end="")
        count = count + 1
        print(lic + " ", end="")
    print("")
def download_tarball(url_argument, name_argument, archives):
    """Download and unpack the upstream tarball plus any extra archives.

    Derives the global ``name`` and ``version`` from the tarball filename
    (with host-specific tweaks for CRAN, PyPI, CPAN, GitHub, Bitbucket and
    rubygems), downloads the tarball, records its sha1 in the ``upstream``
    file, writes a per-package Makefile, extracts everything under
    build.base_path, and finally processes ``archives`` — a flat list of
    alternating (url, destination) entries — the same way.
    """
    global name
    global rawname
    global version
    global url
    global path
    global tarball_prefix
    global gcov_file
    url = url_argument
    tarfile = os.path.basename(url)
    # Regexes that split "<name><sep><version>.<ext>" tarball filenames;
    # more specific suffixes (.src.*, .orig.tar) come before the generic
    # .tar fallback.
    pattern_options = [
        r"(.*?)[\-_](v*[0-9]+[alpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.src\.(tgz|tar|zip)",
        r"(.*?)[\-_](v*[0-9]+[alpha\+_sbpfourcesigedsvstableP0-9\.\-\~]*)\.(tgz|tar|zip)",
        r"(.*?)[\-_](v*[0-9]+[a-zalpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.orig\.tar",
        r"(.*?)[\-_](v*[0-9]+[\+_spbfourcesigedsvstableP0-9\.\~]*)(-.*?)?\.tar",
    ]
    for pattern in pattern_options:
        p = re.compile(pattern)
        m = p.search(tarfile)
        if m:
            name = m.group(1).strip()
            version = m.group(2).strip()
            # Truncate at the first '-' (release suffixes etc.).
            b = version.find("-")
            if b >= 0:
                version = version[:b]
            break
    rawname = name
    # R package
    if url_argument.find("cran.r-project.org") > 0 or url_argument.find("cran.rstudio.com") > 0:
        buildpattern.set_build_pattern("R", 10)
        files.want_dev_split = 0
        buildreq.add_buildreq("clr-R-helpers")
        p = re.compile(r"([A-Za-z0-9]+)_(v*[0-9]+[\+_spbfourcesigedsvstableP0-9\.\~\-]*)\.tar\.gz")
        m = p.search(tarfile)
        if m:
            name = "R-" + m.group(1).strip()
            rawname = m.group(1).strip()
            version = m.group(2).strip()
            b = version.find("-")
            if b >= 0:
                version = version[:b]
    if url_argument.find("pypi.python.org") > 0:
        buildpattern.set_build_pattern("distutils", 10)
    if url_argument.find(".cpan.org/CPAN/") > 0:
        buildpattern.set_build_pattern("cpan", 10)
        if name:
            name = "perl-" + name
    if url_argument.find(".metacpan.org/") > 0:
        buildpattern.set_build_pattern("cpan", 10)
        if name:
            name = "perl-" + name
    if url_argument.find("github.com") > 0:
        p = re.compile(r"https://github.com/.*/(.*?)/archive/v?(.*).tar")
        m = p.search(url_argument)
        if m:
            name = m.group(1).strip()
            version = m.group(2).strip()
    if url_argument.find("bitbucket.org") > 0:
        p = re.compile(r"https://bitbucket.org/.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9_.]*).tar")
        m = p.search(url_argument)
        if m:
            name = m.group(1).strip()
            # convert from 7_4_2 to 7.4.2
            version = m.group(2).strip().replace('_', '.')
    # ruby
    if url_argument.find("rubygems.org/") > 0:
        buildpattern.set_build_pattern("ruby", 10)
        p = re.compile(r"(.*?)[\-_](v*[0-9]+[alpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.gem")
        m = p.search(tarfile)
        if m:
            buildreq.add_buildreq("ruby")
            buildreq.add_buildreq("rubygem-rdoc")
            name = "rubygem-" + m.group(1).strip()
            rawname = m.group(1).strip()
            version = m.group(2).strip()
            b = version.find("-")
            if b >= 0:
                version = version[:b]
    # override from commandline
    if name_argument and name_argument[0] != name:
        pattern = name_argument[0] + r"[\-]*(.*)\.(tgz|tar|zip)"
        p = re.compile(pattern)
        m = p.search(tarfile)
        if m:
            name = name_argument[0]
            rawname = name
            version = m.group(1).strip()
            b = version.find("-")
            if b >= 0:
                version = version[:b]
            if version.startswith('.'):
                version = version[1:]
        else:
            name = name_argument[0]
    if not name:
        # Last resort: derive name/version from a .../<name>/archive/<ver>
        # or .../<name>/tarball/<ver> style URL.
        split = url_argument.split('/')
        if len(split) > 3 and split[-2] in ('archive', 'tarball'):
            name = split[-3]
            version = split[-1]
            if version.startswith('v'):
                version = version[1:]
            # remove extension
            version = '.'.join(version.split('.')[:-1])
            if version.endswith('.tar'):
                version = '.'.join(version.split('.')[:-1])
    # Drop everything up to and including the first '-'.
    b = version.find("-")
    if b >= 0:
        b = b + 1
        version = version[b:]
    # NOTE(review): if version ends up empty here, version[0] raises
    # IndexError — TODO confirm whether empty versions can reach this point.
    if version[0] in ['v', 'r']:
        version = version[1:]
    assert name != ""
    build.download_path = os.getcwd() + "/" + name
    call("mkdir -p %s" % build.download_path)
    # Pre-existing <name>.gcov in the download dir enables gcov handling.
    gcov_path = build.download_path + "/" + name + ".gcov"
    if os.path.isfile(gcov_path):
        gcov_file = name + ".gcov"
    tarball_path = check_or_get_file(url, tarfile)
    sha1 = get_sha1sum(tarball_path)
    with open(build.download_path + "/upstream", "w") as file:
        file.write(sha1 + "/" + tarfile + "\n")
    tarball_prefix = name + "-" + version
    if tarfile.lower().endswith('.zip'):
        # Take the top-level directory from the fourth line of
        # "unzip -l" output.
        tarball_contents = subprocess.check_output(
            ["unzip", "-l", tarball_path], universal_newlines=True)
        if tarball_contents and len(tarball_contents.splitlines()) > 3:
            tarball_prefix = tarball_contents.splitlines()[3].rsplit("/")[0].split()[-1]
        extract_cmd = "unzip -d {0} {1}".format(build.base_path, tarball_path)
    elif tarfile.lower().endswith('.gem'):
        tarball_contents = subprocess.check_output(
            ["gem", "unpack", "--verbose", tarball_path], universal_newlines=True)
        extract_cmd = "gem unpack --target={0} {1}".format(build.base_path, tarball_path)
        if tarball_contents:
            # Last line of the verbose output names the unpack directory.
            tarball_prefix = tarball_contents.splitlines()[-1].rsplit("/")[-1]
            if tarball_prefix.endswith("'"):
                tarball_prefix = tarball_prefix[:-1]
    else:
        extract_cmd, tarball_prefix = build_untar(tarball_path)
    print("\n")
    print("Processing", url_argument)
    print(
        "=============================================================================================")
    print("Name :", name)
    print("Version :", version)
    print("Prefix :", tarball_prefix)
    # Write the per-package Makefile consumed by the common build harness.
    with open(build.download_path + "/Makefile", "w") as file:
        file.write("PKG_NAME := " + name + "\n")
        file.write("URL := " + url_argument + "\n")
        file.write("ARCHIVES :=")
        for archive in archives:
            file.write(" {}".format(archive))
        file.write("\n")
        file.write("\n")
        file.write("include ../common/Makefile.common\n")
    shutil.rmtree("{}".format(build.base_path), ignore_errors=True)
    os.makedirs("{}".format(build.output_path))
    call("mkdir -p %s" % build.download_path)
    call(extract_cmd)
    path = build.base_path + tarball_prefix
    # archives is a flat [url1, dest1, url2, dest2, ...] list.
    for archive, destination in zip(archives[::2], archives[1::2]):
        source_tarball_path = check_or_get_file(archive, os.path.basename(archive))
        if source_tarball_path.lower().endswith('.zip'):
            tarball_contents = subprocess.check_output(
                ["unzip", "-l", source_tarball_path], universal_newlines=True)
            if tarball_contents and len(tarball_contents.splitlines()) > 3:
                source_tarball_prefix = tarball_contents.splitlines()[3].rsplit("/")[0].split()[-1]
            extract_cmd = "unzip -d {0} {1}".format(build.base_path, source_tarball_path)
        else:
            extract_cmd, source_tarball_prefix = build_untar(source_tarball_path)
        buildpattern.archive_details[archive + "prefix"] = source_tarball_prefix
        call(extract_cmd)
        # Move the extracted archive contents into the requested
        # destination inside the main source tree.
        tar_files = glob.glob("{0}{1}/*".format(build.base_path, source_tarball_prefix))
        move_cmd = "mv "
        for tar_file in tar_files:
            move_cmd += tar_file + " "
        move_cmd += '{0}/{1}'.format(path, destination)
        mkdir_cmd = "mkdir -p "
        mkdir_cmd += '{0}/{1}'.format(path, destination)
        print("mkdir " + mkdir_cmd)
        call(mkdir_cmd)
        call(move_cmd)
        sha1 = get_sha1sum(source_tarball_path)
        with open(build.download_path + "/upstream", "a") as file:
            file.write(sha1 + "/" + os.path.basename(archive) + "\n")
def name_and_version(url_argument, name_argument):
    """Parse the url for the package name and version.

    Sets the module globals ``name``, ``rawname``, ``version`` and ``url``
    from the tarball filename and host-specific URL patterns (CRAN, PyPI,
    CPAN, GitHub, Bitbucket, rubygems, maven). ``name_argument`` — a
    sequence whose first element is an override name — takes precedence
    over the detected name.
    """
    global name
    global rawname
    global version
    global url
    url = url_argument
    tarfile = os.path.basename(url)
    # it is important for the more specific patterns to come first
    pattern_options = [
        r"(.*?)[\-_](v*[0-9]+[a-zalpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.orig\.tar",
        r"(.*?)[\-_](v*[0-9]+[alpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.src\.(tgz|tar|zip)",
        r"(.*?)[\-_](v*[0-9]+[alpha\+_sbpfourcesigedsvstableP0-9\.\-\~]*)\.(tgz|tar|zip)",
        r"(.*?)[\-_](v*[0-9]+[\+_spbfourcesigedsvstableP0-9\.\~]*)(-.*?)?\.tar",
    ]
    for pattern in pattern_options:
        p = re.compile(pattern)
        m = p.search(tarfile)
        if m:
            name = m.group(1).strip()
            version = m.group(2).strip()
            # Truncate at the first '-' (release suffixes etc.).
            b = version.find("-")
            if b >= 0:
                version = version[:b]
            break
    rawname = name
    # R package
    if url_argument.find("cran.r-project.org") > 0 or url_argument.find("cran.rstudio.com") > 0:
        buildpattern.set_build_pattern("R", 10)
        files.want_dev_split = 0
        buildreq.add_buildreq("clr-R-helpers")
        p = re.compile(r"([A-Za-z0-9]+)_(v*[0-9]+[\+_spbfourcesigedsvstableP0-9\.\~\-]*)\.tar\.gz")
        m = p.search(tarfile)
        if m:
            name = "R-" + m.group(1).strip()
            rawname = m.group(1).strip()
            version = m.group(2).strip()
            b = version.find("-")
            if b >= 0:
                version = version[:b]
    if url_argument.find("pypi.python.org") > 0:
        buildpattern.set_build_pattern("distutils", 10)
        # Redirect pypi.python.org downloads through the debian mirror.
        url_argument = "http://pypi.debian.net/" + name + "/" + tarfile
    if url_argument.find("pypi.debian.net") > 0:
        buildpattern.set_build_pattern("distutils", 10)
    if url_argument.find(".cpan.org/CPAN/") > 0:
        buildpattern.set_build_pattern("cpan", 10)
        if name:
            name = "perl-" + name
    if url_argument.find(".metacpan.org/") > 0:
        buildpattern.set_build_pattern("cpan", 10)
        if name:
            name = "perl-" + name
    if "github.com" in url_argument:
        # define regex accepted for valid packages, important for specific
        # patterns to come before general ones
        github_patterns = [r"https://github.com/.*/(.*?)/archive/(.*)-final.tar",
                           r"https://github.com/.*/.*/archive/[0-9a-fA-F]{1,40}\/(.*)\-(.*).tar",
                           r"https://github.com/.*/(.*?)/archive/v?(.*).orig.tar",
                           r"https://github.com/.*/(.*?)/archive/(.*).zip",
                           r"https://github.com/.*/(.*?)/archive/v?(.*).tar"]
        for pattern in github_patterns:
            p = re.compile(pattern)
            m = p.search(url_argument)
            if m:
                name = m.group(1).strip()
                # convert from 7_4_2 to 7.4.2
                version = m.group(2).strip().replace('_', '.')
                # remove release candidate tag
                b = version.find("-rc")
                if b > 0:
                    version = version[:b]
                b = version.find("-")
                if b > 0:
                    version = version[b + 1:]
                break
    if url_argument.find("bitbucket.org") > 0:
        p = re.compile(r"https://bitbucket.org/.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9_.]*).tar")
        m = p.search(url_argument)
        if m:
            name = m.group(1).strip()
            # convert from 7_4_2 to 7.4.2
            version = m.group(2).strip().replace('_', '.')
        else:
            version = "1"
    # ruby
    if url_argument.find("rubygems.org/") > 0:
        buildpattern.set_build_pattern("ruby", 10)
        p = re.compile(r"(.*?)[\-_](v*[0-9]+[alpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.gem")
        m = p.search(tarfile)
        if m:
            name = "rubygem-" + m.group(1).strip()
            # remove release candidate tag
            b = name.find("-rc")
            if b > 0:
                name = name[:b]
            rawname = m.group(1).strip()
            version = m.group(2).strip()
            b = version.find("-")
            if b >= 0:
                version = version[:b]
    # maven
    if url_argument.find("maven.org") > 0:
        buildpattern.set_build_pattern("maven", 10)
    # override from commandline
    if name_argument and name_argument[0] != name:
        pattern = name_argument[0] + r"[\-]*(.*)\.(tgz|tar|zip)"
        p = re.compile(pattern)
        m = p.search(tarfile)
        if m:
            name = name_argument[0]
            rawname = name
            version = m.group(1).strip()
            b = version.find("-")
            if b >= 0 and version.find("-beta") < 0:
                version = version[:b]
            if version.startswith('.'):
                version = version[1:]
        else:
            name = name_argument[0]
    if not name:
        # Last resort: derive name/version from a .../<name>/archive/<ver>
        # or .../<name>/tarball/<ver> style URL.
        split = url_argument.split('/')
        if len(split) > 3 and split[-2] in ('archive', 'tarball'):
            name = split[-3]
            version = split[-1]
            if version.startswith('v'):
                version = version[1:]
            # remove extension
            version = '.'.join(version.split('.')[:-1])
            if version.endswith('.tar'):
                version = '.'.join(version.split('.')[:-1])
    b = version.find("-")
    if b >= 0 and version.find("-beta") < 0:
        b = b + 1
        version = version[b:]
    if len(version) > 0 and version[0].lower() in ['v', 'r']:
        version = version[1:]
    # remove package name from beginning of version
    if version.lower().startswith(name.lower()):
        pat = re.compile(re.escape(name), re.IGNORECASE)
        version = pat.sub('', version)
        # NOTE(review): if the version was exactly the name, version is now
        # empty and version[0] raises IndexError — TODO confirm reachable.
        if version[0] in ['.', '-', '_']:
            version = version[1:]
    assert name != ""
def scan_for_configure(package, dir, autospecdir):
    """Scan the package directory for build files to determine build pattern.

    Walks the source tree looking for build-system files (go sources,
    CMakeLists.txt, configure, setup.py, ...), adjusting the build
    pattern and build requirements accordingly, then prints the
    accumulated build requirements.
    """
    global default_summary
    count = 0
    for dirpath, dirnames, files in os.walk(dir):
        # Files in the package root are stronger evidence than files in
        # subdirectories.
        default_score = 2
        if dirpath != dir:
            default_score = 1
        if any(file.endswith(".go") for file in files) and tarball.go_pkgname:
            add_buildreq("go")
            tarball.name = tarball.go_pkgname
            buildpattern.set_build_pattern("golang", default_score)
        if "CMakeLists.txt" in files and "configure.ac" not in files:
            add_buildreq("cmake")
            buildpattern.set_build_pattern("cmake", default_score)
        if "configure" in files and os.access(dirpath + '/configure', os.X_OK):
            buildpattern.set_build_pattern("configure", default_score)
        if "requires.txt" in files:
            grab_python_requirements(dirpath + '/requires.txt')
        if "setup.py" in files:
            add_buildreq("python-dev")
            add_buildreq("setuptools")
            add_buildreq("pbr")
            add_buildreq("pip")
            add_setup_py_requires(dirpath + '/setup.py')
            if setup_py_python3(dirpath + '/setup.py') or setup_py_python3(dirpath + '/PKG-INFO'):
                add_buildreq("python3-dev")
                buildpattern.set_build_pattern("distutils23", default_score)
                # force override the pypi rule
                # FIX: attribute was misspelled "pattern_strengh", which
                # raised AttributeError whenever this branch was reached
                # with the distutils pattern active.
                if buildpattern.default_pattern == 'distutils' and buildpattern.pattern_strength <= 10:
                    buildpattern.default_pattern = 'distutils23'
            else:
                # check for adding python3 support in patches
                try:
                    with open(autospecdir + '/series', 'r') as series:
                        for patchname in series:
                            if setup_py_python3(autospecdir + '/' + patchname.strip()):
                                add_buildreq("python3-dev")
                                buildpattern.set_build_pattern("distutils23", default_score)
                                # force override the pypi rule
                                if buildpattern.default_pattern == 'distutils' and buildpattern.pattern_strength <= 10:
                                    buildpattern.default_pattern = 'distutils23'
                except Exception:
                    # Best effort: a missing/unreadable series file is not
                    # fatal. Was a bare except; narrowed so SystemExit and
                    # KeyboardInterrupt are no longer swallowed.
                    pass
            buildpattern.set_build_pattern("distutils", default_score)
        if "Makefile.PL" in files or "Build.PL" in files:
            buildpattern.set_build_pattern("cpan", default_score)
        if "SConstruct" in files:
            add_buildreq("scons")
            add_buildreq("python-dev")
            buildpattern.set_build_pattern("scons", default_score)
        if "requirements.txt" in files:
            grab_python_requirements(dirpath + '/requirements.txt')
        for name in files:
            if name.lower().startswith("configure."):
                parse_configure_ac(os.path.join(dirpath, name))
            if name.lower().startswith("rakefile") and buildpattern.default_pattern == "ruby":
                Rakefile(os.path.join(dirpath, name))
            if name.lower() == "makefile":
                buildpattern.set_build_pattern("make", default_score)
            if name.lower() == "autogen.sh":
                buildpattern.set_build_pattern("autogen", default_score)
            if name.lower() == "cmakelists.txt":
                buildpattern.set_build_pattern("cmake", default_score)
    # If patches touch configure.*, an autoreconf stage is needed.
    can_reconf = os.path.exists(os.path.join(dir, "configure.ac"))
    if not can_reconf:
        can_reconf = os.path.exists(os.path.join(dir, "configure.in"))
    if can_reconf and patches.autoreconf:
        print("Patches touch configure.*, adding autoreconf stage")
        for breq in autoreconf_reqs:
            add_buildreq(breq)
    else:
        patches.autoreconf = False
    # Print the collected build requirements, five per line.
    print("Buildreqs : ", end="")
    for lic in sorted(buildreqs):
        if count > 4:
            count = 0
            print("\nBuildreqs : ", end="")
        count = count + 1
        print(lic + " ", end="")
    print("")
def name_and_version(name_arg, version_arg, filemanager):
    """Parse the url for the package name and version.

    Sets the module globals ``name``, ``rawname`` and ``version`` from the
    tarball filename in the global ``url`` plus host-specific URL patterns
    (CRAN, PyPI, CPAN, GitHub, Bitbucket, rubygems, maven). ``name_arg``
    and ``version_arg`` are commandline overrides; ``filemanager`` has its
    ``want_dev_split`` flag cleared for R packages.
    """
    global name
    global rawname
    global version
    global url
    tarfile = os.path.basename(url)
    # If both name and version overrides are set via commandline, set the name
    # and version variables to the overrides and bail. If only one override is
    # set, continue to auto detect both name and version since the URL parsing
    # handles both. In this case, wait until the end to perform the override of
    # the one that was set.
    if name_arg and version_arg:
        # rawname == name in this case
        name = name_arg
        rawname = name
        version = version_arg
        return
    # it is important for the more specific patterns to come first
    pattern_options = [
        r"(.*?)[\-_](v*[0-9]+[a-zalpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.orig\.tar",
        r"(.*?)[\-_](v*[0-9]+[alpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.src\.(tgz|tar|zip)",
        r"(.*?)[\-_](v*[0-9]+[alpha\+_sbpfourcesigedsvstableP0-9\.\-\~]*)\.(tgz|tar|zip)",
        r"(.*?)[\-_](v*[0-9]+[\+_spbfourcesigedsvstableP0-9\.\~]*)(-.*?)?\.tar",
    ]
    for pattern in pattern_options:
        p = re.compile(pattern)
        m = p.search(tarfile)
        if m:
            name = m.group(1).strip()
            version = convert_version(m.group(2))
            break
    rawname = name
    # R package
    if url.find("cran.r-project.org") > 0 or url.find("cran.rstudio.com") > 0:
        buildpattern.set_build_pattern("R", 10)
        filemanager.want_dev_split = False
        buildreq.add_buildreq("clr-R-helpers")
        p = re.compile(
            r"([A-Za-z0-9.]+)_(v*[0-9]+[\+_spbfourcesigedsvstableP0-9\.\~\-]*)\.tar\.gz"
        )
        m = p.search(tarfile)
        if m:
            name = "R-" + m.group(1).strip()
            rawname = m.group(1).strip()
            version = m.group(2).strip().replace('-', '.')
    if url.find("pypi.python.org") > 0:
        buildpattern.set_build_pattern("distutils23", 10)
        # Redirect pypi.python.org downloads through the debian mirror.
        url = "http://pypi.debian.net/" + name + "/" + tarfile
        buildreq.add_buildreq("python3-dev")
        buildreq.add_buildreq("python-dev")
    if url.find("pypi.debian.net") > 0:
        buildpattern.set_build_pattern("distutils23", 10)
        buildreq.add_buildreq("python3-dev")
        buildreq.add_buildreq("python-dev")
    if url.find(".cpan.org/CPAN/") > 0:
        buildpattern.set_build_pattern("cpan", 10)
        if name:
            name = "perl-" + name
    if url.find(".metacpan.org/") > 0:
        buildpattern.set_build_pattern("cpan", 10)
        if name:
            name = "perl-" + name
    if "github.com" in url:
        # define regex accepted for valid packages, important for specific
        # patterns to come before general ones
        github_patterns = [
            r"https?://github.com/.*/(.*?)/archive/(.*)-final.tar",
            r"https?://github.com/.*/.*/archive/[0-9a-fA-F]{1,40}\/(.*)\-(.*).tar",
            r"https?://github.com/.*/(.*?)/archive/v?(.*).orig.tar",
            r"https?://github.com/.*/(.*?)/archive/(.*).zip",
            r"https?://github.com/.*/(.*?)/archive/v?(.*).tar",
            r"https?://github.com/.*/(.*?)/releases/download/v.*/(.*).tar"
        ]
        for pattern in github_patterns:
            p = re.compile(pattern)
            m = p.search(url)
            if m:
                name = m.group(1).strip()
                rawname = name
                version = convert_version(m.group(2))
                break
    if url.find("bitbucket.org") > 0:
        p = re.compile(
            r"https?://bitbucket.org/.*/(.*?)/.*/([.0-9a-zA-Z_-]*?).tar")
        m = p.search(url)
        if m:
            name = m.group(1).strip()
            version = convert_version(m.group(2))
        else:
            version = "1"
    # ruby
    if url.find("rubygems.org/") > 0:
        buildpattern.set_build_pattern("ruby", 10)
        p = re.compile(
            r"(.*?)[\-_](v*[0-9]+[alpha\+_spbfourcesigedsvstableP0-9\.\-\~]*)\.gem"
        )
        m = p.search(tarfile)
        if m:
            name = "rubygem-" + m.group(1).strip()
            # remove release candidate tag
            b = name.find("-rc")
            if b > 0:
                name = name[:b]
            rawname = m.group(1).strip()
            version = m.group(2).strip()
            b = version.find("-")
            if b >= 0:
                version = version[:b]
    # maven
    if url.find("maven.org") > 0:
        buildpattern.set_build_pattern("maven", 10)
    if not name:
        # Last resort: derive name/version from a .../<name>/archive/<ver>
        # or .../<name>/tarball/<ver> style URL.
        split = url.split('/')
        if len(split) > 3 and split[-2] in ('archive', 'tarball'):
            name = split[-3]
            version = split[-1]
            version = version.lstrip('v')
            # remove extension
            version = version.rsplit('.', 1)[0]
            if version.endswith('.tar'):
                version = version.replace('.tar', '')
    # override name and version from commandline
    name = name_arg if name_arg else name
    version = version_arg if version_arg else version
    # sanity check to make sure we aren't using an empty version
    if version == "":
        version = "1"
def parse_config_files(self, path, bump, filemanager, version, requirements):
    """Parse the various configuration files that may exist in the package directory.

    Reads autospec.conf (self.config_file) for site-wide settings, seeds the
    per-package default conf files, then applies every per-package control
    file found under ``path`` (bans, additions, patches, configure/make/cmake
    argument files, script prepend/append hooks, ...).

    Args:
        path: package configuration directory being parsed.
        bump: truthy to increment the release number read from ``release``.
        filemanager: object collecting excludes/extras/setuid/attrs results.
        version: package version string; used to validate ``buildreq_cache``.
        requirements: object collecting build/runtime requirement sets.

    Side effects: prints warnings/progress, mutates self, filemanager,
    requirements, and module-level ``tarball``/``check`` state, renames
    deprecated hook files on disk, and may call sys.exit(1) on a malformed
    autospec.conf.
    """
    packages_file = None
    # Require autospec.conf for additional features
    if os.path.exists(self.config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(self.config_file)
        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)
        self.git_uri = config['autospec'].get('git', None)
        self.license_fetch = config['autospec'].get('license_fetch', None)
        self.license_show = config['autospec'].get('license_show', None)
        packages_file = config['autospec'].get('packages_file', None)
        self.yum_conf = config['autospec'].get('yum_conf', None)
        self.failed_pattern_dir = config['autospec'].get(
            'failed_pattern_dir', None)
        # support reading the local files relative to config_file
        if packages_file and not os.path.isabs(packages_file):
            packages_file = os.path.join(os.path.dirname(self.config_file),
                                         packages_file)
        if self.yum_conf and not os.path.isabs(self.yum_conf):
            self.yum_conf = os.path.join(os.path.dirname(self.config_file),
                                         self.yum_conf)
        if self.failed_pattern_dir and not os.path.isabs(
                self.failed_pattern_dir):
            self.failed_pattern_dir = os.path.join(
                os.path.dirname(self.config_file), self.failed_pattern_dir)
        if not packages_file:
            print(
                "Warning: Set [autospec][packages_file] path to package list file for "
                "requires validation")
            # fall back to a "packages" file next to autospec.conf
            packages_file = os.path.join(os.path.dirname(self.config_file),
                                         "packages")
        self.urlban = config['autospec'].get('urlban', None)

    # Read values from options.conf (and deprecated files) and rewrite as necessary
    self.read_config_opts(path)

    if not self.git_uri:
        print(
            "Warning: Set [autospec][git] upstream template for remote git URI configuration"
        )
    if not self.license_fetch:
        print(
            "Warning: Set [autospec][license_fetch] uri for license fetch support"
        )
    if not self.license_show:
        print(
            "Warning: Set [autospec][license_show] uri for license link check support"
        )
    if not self.yum_conf:
        print(
            "Warning: Set [autospec][yum_conf] path to yum.conf file for whatrequires validation"
        )
        self.yum_conf = os.path.join(os.path.dirname(self.config_file),
                                     "image-creator/yum.conf")

    # Load the distro package list used to validate requires.
    if packages_file:
        self.os_packages = set(
            self.read_conf_file(packages_file, track=False))
    else:
        self.os_packages = set(
            self.read_conf_file("~/packages", track=False))

    # Wrapper renders each default-conf description as a "# " comment block.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "
    self.write_default_conf_file(
        path, "buildreq_ban", wrapper,
        "This file contains build requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    self.write_default_conf_file(
        path, "pkgconfig_ban", wrapper,
        "This file contains pkgconfig build requirements that get picked up but"
        " are undesirable. One entry per line, no whitespace.")
    self.write_default_conf_file(
        path, "requires_ban", wrapper,
        "This file contains runtime requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    self.write_default_conf_file(
        path, "buildreq_add", wrapper,
        "This file contains additional build requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    self.write_default_conf_file(
        path, "pkgconfig_add", wrapper,
        "This file contains additional pkgconfig build requirements that did "
        "not get picked up automatically. One name per line, no whitespace."
    )
    self.write_default_conf_file(
        path, "requires_add", wrapper,
        "This file contains additional runtime requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    self.write_default_conf_file(
        path, "excludes", wrapper,
        "This file contains the output files that need %exclude. Full path "
        "names, one per line.")

    # "release" holds the current release number; bump increments it.
    content = self.read_conf_file(os.path.join(path, "release"))
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    # "extra_sources": each line is "filename [extra fields]".
    content = self.read_conf_file(os.path.join(path, "extra_sources"))
    for source in content:
        fields = source.split(maxsplit=1)
        print("Adding additional source file: %s" % fields[0])
        self.config_files.add(os.path.basename(fields[0]))
        self.extra_sources.append(fields)

    # Bans remove the entry from both the live and cached requirement sets.
    content = self.read_conf_file(os.path.join(path, "buildreq_ban"))
    for banned in content:
        print("Banning build requirement: %s." % banned)
        requirements.banned_buildreqs.add(banned)
        requirements.buildreqs.discard(banned)
        requirements.buildreqs_cache.discard(banned)

    content = self.read_conf_file(os.path.join(path, "pkgconfig_ban"))
    for banned in content:
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        requirements.banned_buildreqs.add(banned)
        requirements.buildreqs.discard(banned)
        requirements.buildreqs_cache.discard(banned)

    content = self.read_conf_file(os.path.join(path, "requires_ban"))
    for banned in content:
        print("Banning runtime requirement: %s." % banned)
        requirements.banned_requires.add(banned)
        requirements.requires.discard(banned)

    content = self.read_conf_file(os.path.join(path, "buildreq_add"))
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        requirements.add_buildreq(extra)

    # buildreq_cache is only honored when its first line matches the current
    # version; a stale cache is removed (best-effort).
    cache_file = os.path.join(path, "buildreq_cache")
    content = self.read_conf_file(cache_file)
    if content and content[0] == version:
        for extra in content[1:]:
            print("Adding additional build (cache) requirement: %s." % extra)
            requirements.add_buildreq(extra)
    else:
        try:
            os.unlink(cache_file)
        except FileNotFoundError:
            pass
        except Exception as e:
            print_warning(f"Unable to remove buildreq_cache file: {e}")

    content = self.read_conf_file(os.path.join(path, "pkgconfig_add"))
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        requirements.add_buildreq(extra)

    content = self.read_conf_file(os.path.join(path, "requires_add"))
    for extra in content:
        print("Adding additional runtime requirement: %s." % extra)
        requirements.add_requires(extra, self.os_packages, override=True)

    content = self.read_conf_file(os.path.join(path, "excludes"))
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    filemanager.excludes += content

    content = self.read_conf_file(os.path.join(path, "extras"))
    for extra in content:
        print("extras for : %s." % extra)
    filemanager.extras += content

    # Custom "<name>_extras" files define named extras subpackages; an
    # optional "<name>_extras_requires" file adds their requires.
    for fname in os.listdir(path):
        if not re.search('.+_extras$', fname) or fname == "dev_extras":
            continue
        content = {}
        content['files'] = self.read_conf_file(os.path.join(path, fname))
        if not content:
            print_warning(f"Error reading custom extras file: {fname}")
            continue
        req_file = os.path.join(path, f'{fname}_requires')
        if os.path.isfile(req_file):
            content['requires'] = self.read_conf_file(req_file)
        name = fname[:-len("_extras")]
        print(f"extras-{name} for {content['files']}")
        filemanager.custom_extras["extras-" + f"{name}"] = content

    content = self.read_conf_file(os.path.join(path, "dev_extras"))
    for extra in content:
        print("dev for : %s." % extra)
    filemanager.dev_extras += content

    content = self.read_conf_file(os.path.join(path, "setuid"))
    for suid in content:
        print("setuid for : %s." % suid)
    filemanager.setuid += content

    # "attrs" lines are whitespace-separated: mode owner group filename.
    content = self.read_conf_file(os.path.join(path, "attrs"))
    for line in content:
        attr = line.split()
        filename = attr.pop()
        print("%attr({0},{1},{2}) for: {3}".format(attr[0], attr[1], attr[2],
                                                   filename))
        filemanager.attrs[filename] = attr

    # Patch series: if any patch touches the build system inputs, the
    # autoreconf stage is enabled.
    self.patches += self.read_conf_file(os.path.join(path, "series"))
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in self.patches]
    cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am)|(aclocal.m4)|(configure.ac|configure.in))\" %s" % " ".join(
        pfiles)  # noqa: W605
    if self.patches and call(cmd,
                             check=False,
                             stdout=subprocess.DEVNULL,
                             stderr=subprocess.DEVNULL) == 0:
        self.autoreconf = True

    # Parse the version-specific patch lists; any CVE-named patch marks the
    # package security sensitive.
    update_security_sensitive = False
    for version in self.versions:
        self.verpatches[version] = self.read_conf_file(
            os.path.join(path, '.'.join(['series', version])))
        if any(p.lower().startswith('cve-')
               for p in self.verpatches[version]):
            update_security_sensitive = True
    if any(p.lower().startswith('cve-') for p in self.patches):
        update_security_sensitive = True
    if update_security_sensitive:
        self.config_opts['security_sensitive'] = True
        self.rewrite_config_opts(path)

    # Extra configure flags, one variant file per build flavor; lines are
    # joined with a spec-style backslash continuation.
    content = self.read_conf_file(os.path.join(path, "configure"))
    self.extra_configure = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure32"))
    self.extra_configure32 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure64"))
    self.extra_configure64 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure_avx2"))
    self.extra_configure_avx2 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure_avx512"))
    self.extra_configure_avx512 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure_openmpi"))
    self.extra_configure_openmpi = " \\\n".join(content)

    if self.config_opts["keepstatic"]:
        self.disable_static = ""
    if self.config_opts['broken_parallel_build']:
        self.parallel_build = ""

    # Extra make/cmake arguments for the various build phases.
    content = self.read_conf_file(os.path.join(path, "make_args"))
    if content:
        self.extra_make = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "make32_args"))
    if content:
        self.extra32_make = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "make_install_args"))
    if content:
        self.extra_make_install = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "make32_install_args"))
    if content:
        self.extra_make32_install = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "install_macro"))
    if content and content[0]:
        self.install_macro = content[0]
    content = self.read_conf_file(os.path.join(path, "cmake_args"))
    if content:
        self.extra_cmake = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "cmake_args_openmpi"))
    if content:
        self.extra_cmake_openmpi = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "cmake_srcdir"))
    if content and content[0]:
        self.cmake_srcdir = content[0]
    content = self.read_conf_file(os.path.join(path, "subdir"))
    if content and content[0]:
        self.subdir = content[0]

    # An explicit build_pattern override wins (strength 20) and disables
    # the autoreconf stage.
    content = self.read_conf_file(os.path.join(path, "build_pattern"))
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        self.autoreconf = False

    content = self.read_script_file(
        os.path.join(path, "make_check_command"))
    if content:
        check.tests_config = '\n'.join(content)

    # "<tarball>.license" lists license names; entries containing ":" are
    # skipped, and blacklisted licenses produce a warning.
    content = self.read_conf_file(
        os.path.join(path, tarball.name + ".license"))
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                if not license.add_license(word, self.license_translations,
                                           self.license_blacklist):
                    print_warning(
                        "{}: blacklisted license {} ignored.".format(
                            tarball.name + ".license", word))

    content = self.read_conf_file(os.path.join(path, "golang_libpath"))
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath : {}".format(tarball.golibpath))

    if self.config_opts['use_clang']:
        # clang build: funroll-loops is incompatible here, and llvm is needed.
        self.config_opts['funroll-loops'] = False
        requirements.add_buildreq("llvm")

    if self.config_opts['32bit']:
        requirements.add_buildreq("glibc-libc32")
        requirements.add_buildreq("glibc-dev32")
        requirements.add_buildreq("gcc-dev32")
        requirements.add_buildreq("gcc-libgcc32")
        requirements.add_buildreq("gcc-libstdc++32")

    if self.config_opts['openmpi']:
        requirements.add_buildreq("openmpi-dev")
        requirements.add_buildreq("modules")
        # MPI testsuites generally require "openssh"
        requirements.add_buildreq("openssh")

    # Script hooks; deprecated file names are renamed on disk first.
    self.prep_prepend = self.read_script_file(
        os.path.join(path, "prep_prepend"))
    if os.path.isfile(os.path.join(path, "prep_append")):
        os.rename(os.path.join(path, "prep_append"),
                  os.path.join(path, "build_prepend"))
    self.make_prepend = self.read_script_file(
        os.path.join(path, "make_prepend"))
    self.build_prepend = self.read_script_file(
        os.path.join(path, "build_prepend"))
    self.build_append = self.read_script_file(
        os.path.join(path, "build_append"))
    self.install_prepend = self.read_script_file(
        os.path.join(path, "install_prepend"))
    if os.path.isfile(os.path.join(path, "make_install_append")):
        os.rename(os.path.join(path, "make_install_append"),
                  os.path.join(path, "install_append"))
    self.install_append = self.read_script_file(
        os.path.join(path, "install_append"))
    self.service_restart = self.read_conf_file(
        os.path.join(path, "service_restart"))
    self.profile_payload = self.read_script_file(
        os.path.join(path, "profile_payload"))
    self.custom_desc = self.read_conf_file(
        os.path.join(path, "description"))
    self.custom_summ = self.read_conf_file(os.path.join(path, "summary"))
def parse_config_files(path, bump):
    """Parse the configuration files in the package directory ``path``.

    Module-level variant: reads autospec.conf for site settings, seeds the
    default per-package conf files, then applies each control file (bans,
    additions, excludes, patches, configure/make overrides) to the module
    globals and to the ``buildreq``/``files``/``patches``/``buildpattern``/
    ``test``/``tarball`` modules.

    Args:
        path: package configuration directory.
        bump: truthy to increment the release number read from "release".

    Side effects: prints progress, writes missing default conf files to disk,
    and may call sys.exit(1) on a malformed autospec.conf.
    """
    global extra_configure
    global extra_configure32
    global config_files
    global config_path
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global urlban
    global config_file
    global profile_payload
    global config_opts

    config_path = path
    read_config_opts()

    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)
        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)
        git_uri = config["autospec"].get("git", None)
        license_fetch = config["autospec"].get("license_fetch", None)
        license_show = config["autospec"].get("license_show", None)
        urlban = config["autospec"].get("urlban", None)

    if not git_uri:
        print("Warning: Set [autospec][git] upstream template for git support")
    if not license_fetch:
        print("Warning: Set [autospec][license_fetch] uri for license fetch support")
    if not license_show:
        print("Warning: Set [autospec][license_show] uri for license link check support")

    # Wrapper renders default-conf descriptions as "# " comment blocks.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        # Create a conf file with a commented description, unless it exists.
        config_files.add(name)
        filename = path + "/" + name
        if os.path.isfile(filename):
            return
        with open(filename, "w") as f:
            f.write(wrapper.fill(description) + "\n")

    write_default_conf_file(
        "buildreq_ban",
        "This file contains build requirements that get picked up but are undesirable. One entry per line, no whitespace.",
    )
    write_default_conf_file(
        "pkgconfig_ban",
        "This file contains pkgconfig build requirements that get picked up but are undesirable. One entry per line, no whitespace.",
    )
    write_default_conf_file(
        "buildreq_add",
        "This file contains additional build requirements that did not get picked up automatically. One name per line, no whitespace.",
    )
    write_default_conf_file(
        "pkgconfig_add",
        "This file contains additional pkgconfig build requirements that did not get picked up automatically. One name per line, no whitespace.",
    )
    write_default_conf_file(
        "excludes",
        "This file contains the output files that need %exclude. Full path names, one per line."
    )

    # "release" holds the current release number; bump increments it.
    content = read_conf_file("release")
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    content = read_conf_file("buildreq_ban")
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("pkgconfig_ban")
    for banned in content:
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("buildreq_add")
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("pkgconfig_add")
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("excludes")
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    files.excludes += content

    content = read_conf_file("extras")
    for extra in content:
        print("extras for: %s." % extra)
    files.extras += content

    content = read_conf_file("setuid")
    for suid in content:
        print("setuid for: %s." % suid)
    files.setuid += content

    # "attrs" lines look like "%attr(mode,owner,group) filename"; split on
    # the punctuation, last token is the filename.
    content = read_conf_file("attrs")
    for line in content:
        attr = re.split("\(|\)|,", line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        files.attrs[filename] = attr

    # If any patch in the series touches the autotools inputs, enable the
    # autoreconf stage.
    patches.patches += read_conf_file("series")
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches.patches]
    cmd = 'egrep "(\+\+\+|\-\-\-).*((Makefile.am)|(configure.ac|configure.in))" %s' % " ".join(pfiles)
    if len(patches.patches) > 0 and call(cmd,
                                         check=False,
                                         stdout=subprocess.DEVNULL,
                                         stderr=subprocess.DEVNULL) == 0:
        patches.autoreconf = True

    # Extra configure flags, joined with spec-style backslash continuations.
    content = read_conf_file("configure")
    extra_configure = " \\\n".join(content)
    content = read_conf_file("configure32")
    extra_configure32 = " \\\n".join(content)

    if config_opts["keepstatic"]:
        buildpattern.disable_static = ""
    if config_opts["broken_parallel_build"]:
        parallel_build = ""

    # Single-line overrides for the make/cmake phases.
    content = read_conf_file("make_args")
    if content and content[0]:
        buildpattern.extra_make = content[0]

    content = read_conf_file("make_install_args")
    if content and content[0]:
        buildpattern.extra_make_install = content[0]

    content = read_conf_file("install_macro")
    if content and content[0]:
        buildpattern.install_macro = content[0]

    content = read_conf_file("cmake_args")
    if content and content[0]:
        buildpattern.extra_cmake = content[0]

    content = read_conf_file("subdir")
    if content and content[0]:
        buildpattern.subdir = content[0]

    # An explicit build_pattern override wins (strength 20) and disables
    # the autoreconf stage.
    content = read_conf_file("build_pattern")
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        patches.autoreconf = False

    content = read_conf_file("make_check_command")
    if content and content[0]:
        test.tests_config = content[0]

    # "<tarball>.license" lists license names; entries containing ":" are skipped.
    content = read_conf_file(tarball.name + ".license")
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    content = read_conf_file("golang_libpath")
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath : {}".format(tarball.golibpath))

    if config_opts["use_clang"]:
        # clang build: funroll-loops is disabled and llvm-dev is required.
        config_opts["funroll-loops"] = False
        buildreq.add_buildreq("llvm-dev")

    if config_opts["32bit"]:
        buildreq.add_buildreq("glibc-libc32")
        buildreq.add_buildreq("glibc-dev32")
        buildreq.add_buildreq("gcc-dev32")
        buildreq.add_buildreq("gcc-libgcc32")
        buildreq.add_buildreq("gcc-libstdc++32")

    # Script hook contents are stored verbatim.
    buildpattern.make_install_append = read_conf_file("make_install_append")
    buildpattern.prep_append = read_conf_file("prep_append")
    profile_payload = read_conf_file("profile_payload")
def parse_config_files(path, bump):
    """Parse the configuration files in the package directory ``path``.

    Early module-level variant: flag files such as ``keepstatic`` or
    ``asneeded`` are simple presence toggles read with read_conf_file().
    Reads autospec.conf, seeds default per-package conf files, then applies
    each control file to the module globals and the ``buildreq``/``files``/
    ``patches``/``buildpattern``/``test``/``tarball`` modules.

    Args:
        path: package configuration directory.
        bump: truthy to increment the release number read from "release".

    Side effects: prints progress, writes missing default conf files, and
    may call sys.exit(1) on a malformed autospec.conf.
    """
    global extra_configure
    global keepstatic
    global asneeded
    global optimize_size
    global optimize_speed
    global insecure_build
    global config_files
    global config_path
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global config_file

    config_path = path

    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)
        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)
        git_uri = config['autospec'].get('git', None)
        license_fetch = config['autospec'].get('license_fetch', None)
        license_show = config['autospec'].get('license_show', None)

    if not git_uri:
        print("Warning: Set [autospec][git] upstream template for git support")
    if not license_fetch:
        print("Warning: Set [autospec][license_fetch] uri for license fetch support")
    if not license_show:
        print("Warning: Set [autospec][license_show] uri for license link check support")

    # Wrapper renders default-conf descriptions as "# " comment blocks.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        # Create a conf file with a commented description, unless it exists.
        config_files.add(name)
        filename = path + "/" + name
        if os.path.isfile(filename):
            return
        with open(filename, "w") as f:
            f.write(wrapper.fill(description) + "\n")

    write_default_conf_file("buildreq_ban", "This file contains build requirements that get picked up but are undesirable. One entry per line, no whitespace.")
    write_default_conf_file("pkgconfig_ban", "This file contains pkgconfig build requirements that get picked up but are undesirable. One entry per line, no whitespace.")
    write_default_conf_file("buildreq_add", "This file contains additional build requirements that did not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file("pkgconfig_add", "This file contains additional pkgconfig build requirements that did not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file("excludes", "This file contains the output files that need %exclude. Full path names, one per line.")

    # "release" holds the current release number; bump increments it.
    content = read_conf_file("release")
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    content = read_conf_file("buildreq_ban")
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("pkgconfig_ban")
    for banned in content:
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("buildreq_add")
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("pkgconfig_add")
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("excludes")
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    files.excludes += content

    content = read_conf_file("extras")
    for extra in content:
        print("extras for: %s." % extra)
    files.extras += content

    content = read_conf_file("setuid")
    for suid in content:
        print("setuid for: %s." % suid)
    files.setuid += content

    # "attrs" lines look like "%attr(mode,owner,group) filename"; split on
    # the punctuation, last token is the filename.
    content = read_conf_file("attrs")
    for line in content:
        attr = re.split('\(|\)|,', line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        files.attrs[filename] = attr

    # If any patch in the series touches the build-system inputs, enable the
    # autoreconf stage.
    patches.patches += read_conf_file("series")
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches.patches]
    cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am|Makefile.in)|(configure.ac|configure.in))\" %s" % \
        " ".join(pfiles)
    if len(patches.patches) > 0 and call(cmd,
                                         check=False,
                                         stdout=subprocess.DEVNULL,
                                         stderr=subprocess.DEVNULL) == 0:
        patches.autoreconf = True

    content = read_conf_file("configure")
    extra_configure = " \\\n".join(content)

    # Presence-style flag files toggle module-level build options.
    if read_conf_file("keepstatic"):
        keepstatic = 1
        buildpattern.disable_static = ""
    if read_conf_file("asneeded"):
        print("Disabling LD_AS_NEEDED\n")
        asneeded = 0
    if read_conf_file("optimize_size"):
        optimize_size = True
    if read_conf_file("funroll-loops"):
        optimize_speed = True
    if read_conf_file("insecure_build"):
        insecure_build = True
    if read_conf_file("broken_parallel_build"):
        parallel_build = ""

    # Single-line overrides for the make/cmake phases.
    content = read_conf_file("make_args")
    if content and content[0]:
        buildpattern.extra_make = content[0]

    content = read_conf_file("make_install_args")
    if content and content[0]:
        buildpattern.extra_make_install = content[0]

    content = read_conf_file("install_macro")
    if content and content[0]:
        buildpattern.install_macro = content[0]

    content = read_conf_file("cmake_args")
    if content and content[0]:
        buildpattern.extra_cmake = content[0]

    content = read_conf_file("subdir")
    if content and content[0]:
        buildpattern.subdir = content[0]

    # An explicit build_pattern override wins (strength 20) and disables
    # the autoreconf stage.
    content = read_conf_file("build_pattern")
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        patches.autoreconf = False

    # Test-suite toggles and overrides.
    if read_conf_file("skip_test_suite"):
        test.skip_tests = True
    if read_conf_file("unit_tests_must_pass"):
        test.new_pkg = False

    content = read_conf_file("make_check_command")
    if content and content[0]:
        test.tests_config = content[0]

    content = read_conf_file("allow_test_failures")
    if content and content[0]:
        test.allow_test_failures = True

    # "<tarball>.license" lists license names; entries containing ":" are skipped.
    content = read_conf_file(tarball.name + ".license")
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    buildpattern.make_install_append = read_conf_file("make_install_append")
def scan_for_configure(dirn):
    """
    Scan the package directory for build files to determine build pattern
    """
    count = 0
    for root, _, filenames in os.walk(dirn):
        # Files at the top level of the tree score higher than nested ones.
        score = 2 if root == dirn else 1

        if any(entry.endswith(".go") for entry in filenames):
            add_buildreq("go")
            buildpattern.set_build_pattern("golang", score)
        if "CMakeLists.txt" in filenames and "configure.ac" not in filenames:
            add_buildreq("cmake")
            buildpattern.set_build_pattern("cmake", score)
        if "configure" in filenames and os.access(root + '/configure', os.X_OK):
            buildpattern.set_build_pattern("configure", score)
        if "requires.txt" in filenames:
            grab_python_requirements(root + '/requires.txt')
        if "setup.py" in filenames:
            # Python source tree: pull in the distutils toolchain and mine
            # setup.py for requirements.
            for req in ("python-dev", "python3-dev", "setuptools", "pbr", "pip"):
                add_buildreq(req)
            add_setup_py_requires(root + '/setup.py')
            buildpattern.set_build_pattern("distutils23", score)
        if "Makefile.PL" in filenames or "Build.PL" in filenames:
            buildpattern.set_build_pattern("cpan", score)
        if "SConstruct" in filenames:
            add_buildreq("scons")
            add_buildreq("python-dev")
            buildpattern.set_build_pattern("scons", score)
        if "requirements.txt" in filenames:
            grab_python_requirements(root + '/requirements.txt')
        if "meson.build" in filenames:
            add_buildreq("meson")
            add_buildreq("ninja")
            add_buildreq("python3")
            buildpattern.set_build_pattern("meson", score)

        # Per-file checks that need case-insensitive name matching.
        for fname in filenames:
            lowered = fname.lower()
            if lowered == "cargo.toml":
                parse_cargo_toml(os.path.join(root, fname))
            if lowered.startswith("configure."):
                parse_configure_ac(os.path.join(root, fname))
            if lowered.startswith("rakefile") and buildpattern.default_pattern == "ruby":
                rakefile(os.path.join(root, fname))
            if lowered == "makefile":
                buildpattern.set_build_pattern("make", score)
            if lowered == "autogen.sh":
                buildpattern.set_build_pattern("autogen", score)
            if lowered == "cmakelists.txt":
                buildpattern.set_build_pattern("cmake", score)

    # autoreconf only makes sense when an autotools input file exists at the
    # top level and the patch scan requested it.
    can_reconf = (os.path.exists(os.path.join(dirn, "configure.ac"))
                  or os.path.exists(os.path.join(dirn, "configure.in")))
    if can_reconf and config.autoreconf:
        print("Patches touch configure.*, adding autoreconf stage")
        for breq in autoreconf_reqs:
            add_buildreq(breq)
    else:
        config.autoreconf = False

    # Report the accumulated build requirements, five per line.
    print("Buildreqs : ", end="")
    for req in sorted(buildreqs):
        if count > 4:
            count = 0
            print("\nBuildreqs : ", end="")
        count += 1
        print(req + " ", end="")
    print("")
def scan_for_configure(self, dirn, tname, dlpath, config):
    """Scan the package directory for build files to determine build pattern.

    Walks ``dirn`` looking for build-system marker files, adds the matching
    buildreq bundles, and updates the module-level ``buildpattern`` scores
    (top-level hits score 2, nested hits score 1). For distutils3 packages,
    also fetches pypi metadata (from a local ``pypi.json`` override in
    ``dlpath``, else from pypi via ``pypidata``).

    Args:
        dirn: root of the unpacked source tree to scan.
        tname: package name used for the pypi lookup (overridden by
            config.alias when set).
        dlpath: download directory that may contain a pypi.json override.
        config: configuration object (os_packages, config_opts, autoreconf,
            cmake_srcdir, gems, qt_modules, cmake_modules, alias, ...).
    """
    # Seed buildreq bundles for a pattern that was already detected
    # (e.g. from the download URL) before scanning the tree.
    if buildpattern.default_pattern == "distutils36":
        self.add_buildreq("buildreq-distutils36")
    elif buildpattern.default_pattern == "distutils3":
        self.add_buildreq("buildreq-distutils3")
    elif buildpattern.default_pattern == "golang":
        self.add_buildreq("buildreq-golang")
    elif buildpattern.default_pattern == "cmake":
        self.add_buildreq("buildreq-cmake")
    elif buildpattern.default_pattern == "configure":
        self.add_buildreq("buildreq-configure")
    elif buildpattern.default_pattern == "qmake":
        self.add_buildreq("buildreq-qmake")
    elif buildpattern.default_pattern == "cpan":
        self.add_buildreq("buildreq-cpan")
    elif buildpattern.default_pattern == "scons":
        self.add_buildreq("buildreq-scons")
    elif buildpattern.default_pattern == "R":
        self.add_buildreq("buildreq-R")
        self.parse_r_description(os.path.join(dirn, "DESCRIPTION"),
                                 config.os_packages)
    elif buildpattern.default_pattern == "phpize":
        self.add_buildreq("buildreq-php")
    elif buildpattern.default_pattern == "nginx":
        self.add_buildreq("buildreq-nginx")

    count = 0
    for dirpath, _, files in os.walk(dirn):
        # Top-level matches outrank matches in subdirectories.
        default_score = 2 if dirpath == dirn else 1

        if any(f.endswith(".go") for f in files):
            self.add_buildreq("buildreq-golang")
            buildpattern.set_build_pattern("golang", default_score)
        if "go.mod" in files:
            if "Makefile" not in files:
                # Go packages usually have make build systems so far
                # so only use go directly if we can't find a Makefile
                buildpattern.set_build_pattern("golang", default_score)
            self.add_buildreq("buildreq-golang")
            if buildpattern.default_pattern == "golang-mod" or buildpattern.default_pattern == "godep":
                config.set_gopath = False
                mod_path = os.path.join(dirpath, "go.mod")
                reqs = parse_go_mod(mod_path)
                for req in reqs:
                    # req[0] is a SCM url segment in the form, repo/XXX/dependency-name
                    # req[1] is the version of the dependency
                    pkg = "go-" + req[0].replace("/", "-")
                    self.add_buildreq(pkg)
                    if buildpattern.default_pattern == "godep":
                        self.add_requires(pkg, config.os_packages)
        if "CMakeLists.txt" in files and "configure.ac" not in files:
            self.add_buildreq("buildreq-cmake")
            buildpattern.set_build_pattern("cmake", default_score)
            # Only parse catkin deps from the CMakeLists.txt that sits at the
            # configured cmake source directory.
            srcdir = os.path.abspath(
                os.path.join(dirn, "clr-build", config.cmake_srcdir or ".."))
            if os.path.samefile(dirpath, srcdir):
                self.parse_catkin_deps(
                    os.path.join(srcdir, "CMakeLists.txt"),
                    config.config_opts.get('32bit'))
        if "configure" in files and os.access(dirpath + '/configure',
                                              os.X_OK):
            buildpattern.set_build_pattern("configure", default_score)
        elif any(is_qmake_pro(f) for f in files):
            # qmake is only considered when no executable configure exists.
            self.add_buildreq("buildreq-qmake")
            buildpattern.set_build_pattern("qmake", default_score)
        if "requires.txt" in files:
            self.grab_python_requirements(dirpath + '/requires.txt',
                                          config.os_packages)
        if "setup.py" in files:
            self.add_buildreq("buildreq-distutils3")
            self.add_setup_py_requires(dirpath + '/setup.py',
                                       config.os_packages)
            # setup.py classifiers decide which python pattern applies.
            python_pattern = get_python_build_version_from_classifier(
                dirpath + '/setup.py')
            buildpattern.set_build_pattern(python_pattern, default_score)
        if "Makefile.PL" in files or "Build.PL" in files:
            buildpattern.set_build_pattern("cpan", default_score)
            self.add_buildreq("buildreq-cpan")
        if "SConstruct" in files:
            self.add_buildreq("buildreq-scons")
            buildpattern.set_build_pattern("scons", default_score)
        if "requirements.txt" in files:
            self.grab_python_requirements(dirpath + '/requirements.txt',
                                          config.os_packages)
        if "meson.build" in files:
            self.add_buildreq("buildreq-meson")
            buildpattern.set_build_pattern("meson", default_score)
        if "build.xml" in files:
            self.add_buildreq("apache-ant")
            buildpattern.set_build_pattern("ant", default_score)

        # Per-file checks (mostly case-insensitive name matches).
        for name in files:
            if name.lower() == "cargo.toml" and dirpath == dirn:
                self.parse_cargo_toml(os.path.join(dirpath, name),
                                      config.os_packages)
            if name.lower().startswith("configure."):
                self.parse_configure_ac(os.path.join(dirpath, name),
                                        config.config_opts.get('32bit'))
            if name.lower().startswith(
                    "rakefile") and buildpattern.default_pattern == "ruby":
                self.rakefile(os.path.join(dirpath, name), config.gems)
            if name.endswith(
                    ".pro") and buildpattern.default_pattern == "qmake":
                self.qmake_profile(os.path.join(dirpath, name),
                                   config.qt_modules)
            if name.lower() == "makefile":
                buildpattern.set_build_pattern("make", default_score)
            if name.lower() == "autogen.sh":
                buildpattern.set_build_pattern("autogen", default_score)
            if name.lower() == "cmakelists.txt":
                buildpattern.set_build_pattern("cmake", default_score)
            if (name.lower() == "cmakelists.txt" or name.endswith(".cmake")) \
                    and buildpattern.default_pattern == "cmake":
                self.parse_cmake(os.path.join(dirpath, name),
                                 config.cmake_modules,
                                 config.config_opts.get('32bit'))

    # autoreconf is only valid when an autotools input exists at the top
    # level and the patch scan requested it.
    can_reconf = os.path.exists(os.path.join(dirn, "configure.ac"))
    if not can_reconf:
        can_reconf = os.path.exists(os.path.join(dirn, "configure.in"))
    if can_reconf and config.autoreconf:
        print("Patches touch configure.*, adding autoreconf stage")
        for breq in self.autoreconf_reqs:
            self.add_buildreq(breq)
    else:
        config.autoreconf = False

    if buildpattern.default_pattern == "distutils3":
        # First look for a local override
        pypi_json = ""
        pypi_file = os.path.join(dlpath, "pypi.json")
        if os.path.isfile(pypi_file):
            with open(pypi_file, "r") as pfile:
                pypi_json = pfile.read()
        else:
            # Try and grab the pypi details for the package
            if config.alias:
                tname = config.alias
            pypi_name = pypidata.get_pypi_name(tname)
            pypi_json = pypidata.get_pypi_metadata(pypi_name)
        if pypi_json:
            try:
                package_pypi = json.loads(pypi_json)
            except json.JSONDecodeError:
                # Malformed metadata: proceed as if nothing was found.
                package_pypi = {}
            if package_pypi.get("name"):
                self.pypi_provides = package_pypi["name"]
            if package_pypi.get("requires"):
                self.pypi_requires = set(package_pypi["requires"])
            if package_pypi.get("license"):
                # The license field is freeform, might be worth looking at though
                print(
                    f"Pypi says the license is: {package_pypi['license']}")
            if package_pypi.get("summary"):
                specdescription.assign_summary(package_pypi["summary"], 4)

    # Report the accumulated build requirements, five per line.
    print("Buildreqs : ", end="")
    for lic in sorted(self.buildreqs):
        if count > 4:
            count = 0
            print("\nBuildreqs : ", end="")
        count = count + 1
        print(lic + " ", end="")
    print("")
def parse_config_files(path, bump, filemanager, version):
    """Parse the various configuration files that may exist in the package directory.

    Reads the global autospec.conf (located via the module-level ``config_file``)
    and the many per-package override files under ``path`` (buildreq_ban,
    configure, make_args, series, ...), storing the results in module-level
    globals consumed by the rest of the build.

    Args:
        path: Package configuration directory to read override files from.
        bump: If truthy, increment the release number read from ``release``.
        filemanager: Object collecting file-level overrides; its ``excludes``,
            ``extras``, ``dev_extras``, ``setuid`` and ``attrs`` members are
            extended in place.
        version: Package version string; the ``buildreq_cache`` file is only
            honored when its first line matches this version.
    """
    global extra_configure
    global extra_configure32
    global extra_configure64
    global extra_configure_avx2
    global extra_configure_avx512
    global config_files
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global os_packages
    global urlban
    global config_file
    global profile_payload
    global config_opts
    global extra_make
    global extra32_make
    global extra_make_install
    global extra_make32_install
    global extra_cmake
    global cmake_srcdir
    global subdir
    global install_macro
    global disable_static
    global prep_prepend
    global build_prepend
    global make_prepend
    global install_prepend
    global install_append
    global patches
    global autoreconf
    global yum_conf
    global custom_desc
    global failed_pattern_dir

    packages_file = None
    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)
        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)
        git_uri = config['autospec'].get('git', None)
        license_fetch = config['autospec'].get('license_fetch', None)
        license_show = config['autospec'].get('license_show', None)
        packages_file = config['autospec'].get('packages_file', None)
        yum_conf = config['autospec'].get('yum_conf', None)
        failed_pattern_dir = config['autospec'].get('failed_pattern_dir', None)
        # support reading the local files relative to config_file
        if packages_file and not os.path.isabs(packages_file):
            packages_file = os.path.join(os.path.dirname(config_file), packages_file)
        if yum_conf and not os.path.isabs(yum_conf):
            yum_conf = os.path.join(os.path.dirname(config_file), yum_conf)
        if failed_pattern_dir and not os.path.isabs(failed_pattern_dir):
            failed_pattern_dir = os.path.join(os.path.dirname(config_file), failed_pattern_dir)
        if not packages_file:
            print("Warning: Set [autospec][packages_file] path to package list file for "
                  "requires validation")
            packages_file = os.path.join(os.path.dirname(config_file), "packages")
        urlban = config['autospec'].get('urlban', None)

    # Read values from options.conf (and deprecated files) and rewrite as necessary
    read_config_opts(path)

    # Warn about any optional autospec.conf settings that are missing.
    if not git_uri:
        print(
            "Warning: Set [autospec][git] upstream template for remote git URI configuration"
        )
    if not license_fetch:
        print(
            "Warning: Set [autospec][license_fetch] uri for license fetch support"
        )
    if not license_show:
        print(
            "Warning: Set [autospec][license_show] uri for license link check support"
        )
    if not yum_conf:
        print(
            "Warning: Set [autospec][yum_conf] path to yum.conf file for whatrequires validation"
        )
        yum_conf = os.path.join(os.path.dirname(config_file), "image-creator/yum.conf")

    if packages_file:
        os_packages = set(read_conf_file(packages_file))
    else:
        os_packages = set(read_conf_file("~/packages"))

    # Wrapper used to render each default-file description as a "#" comment block.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        """Write default configuration file with description to file name."""
        config_files.add(name)
        filename = os.path.join(path, name)
        if os.path.isfile(filename):
            # Never clobber an existing (possibly user-edited) file.
            return
        write_out(filename, wrapper.fill(description) + "\n")

    # Seed the per-package override files with explanatory comments.
    write_default_conf_file(
        "buildreq_ban",
        "This file contains build requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "pkgconfig_ban",
        "This file contains pkgconfig build requirements that get picked up but"
        " are undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "requires_ban",
        "This file contains runtime requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "buildreq_add",
        "This file contains additional build requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "pkgconfig_add",
        "This file contains additional pkgconfig build requirements that did "
        "not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "requires_add",
        "This file contains additional runtime requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "excludes",
        "This file contains the output files that need %exclude. Full path "
        "names, one per line.")

    # Release number, optionally bumped.
    # NOTE(review): a non-integer first line raises ValueError here — presumably
    # the file is machine-written; verify against callers.
    content = read_conf_file(os.path.join(path, "release"))
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    # Ban lists: requirements to suppress even if auto-detected.
    content = read_conf_file(os.path.join(path, "buildreq_ban"))
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file(os.path.join(path, "pkgconfig_ban"))
    for banned in content:
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file(os.path.join(path, "requires_ban"))
    for banned in content:
        print("Banning runtime requirement: %s." % banned)
        buildreq.banned_requires.add(banned)

    # Add lists: extra requirements not auto-detected.
    content = read_conf_file(os.path.join(path, "buildreq_add"))
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    # Cached build requirements are only trusted for the matching version
    # (first line of the cache file holds the version it was generated for).
    content = read_conf_file(os.path.join(path, "buildreq_cache"))
    if content and content[0] == version:
        for extra in content[1:]:
            print("Adding additional build (cache) requirement: %s." % extra)
            buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "pkgconfig_add"))
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "requires_add"))
    for extra in content:
        print("Adding additional runtime requirement: %s." % extra)
        buildreq.add_requires(extra, override=True)

    # File-level overrides handed to the filemanager.
    content = read_conf_file(os.path.join(path, "excludes"))
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    filemanager.excludes += content

    content = read_conf_file(os.path.join(path, "extras"))
    for extra in content:
        print("extras for : %s." % extra)
    filemanager.extras += content

    content = read_conf_file(os.path.join(path, "dev_extras"))
    for extra in content:
        print("dev for : %s." % extra)
    filemanager.dev_extras += content

    content = read_conf_file(os.path.join(path, "setuid"))
    for suid in content:
        print("setuid for : %s." % suid)
    filemanager.setuid += content

    # attrs lines look like "attr(...) filename"; split on parens/commas,
    # last token is the file name, the rest are the attr arguments.
    content = read_conf_file(os.path.join(path, "attrs"))
    for line in content:
        attr = re.split(r'\(|\)|,', line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        filemanager.attrs[filename] = attr

    # Patch series: if any patch touches the autotools inputs, schedule an
    # autoreconf stage.
    patches += read_conf_file(os.path.join(path, "series"))
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches]
    cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am)|(configure.ac|configure.in))\" %s" % " ".join(
        pfiles)  # noqa: W605
    if patches and call(
            cmd, check=False, stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL) == 0:
        autoreconf = True

    # Any CVE-named patch marks the package security sensitive.
    if any(p.lower().startswith('cve-') for p in patches):
        config_opts['security_sensitive'] = True
        rewrite_config_opts(path)

    # Extra flags for the various configure/make invocations; each override
    # file contributes one line per flag, joined with a line continuation.
    content = read_conf_file(os.path.join(path, "configure"))
    extra_configure = " \\\n".join(content)
    content = read_conf_file(os.path.join(path, "configure32"))
    extra_configure32 = " \\\n".join(content)
    content = read_conf_file(os.path.join(path, "configure64"))
    extra_configure64 = " \\\n".join(content)
    content = read_conf_file(os.path.join(path, "configure_avx2"))
    extra_configure_avx2 = " \\\n".join(content)
    content = read_conf_file(os.path.join(path, "configure_avx512"))
    extra_configure_avx512 = " \\\n".join(content)

    if config_opts["keepstatic"]:
        disable_static = ""
    if config_opts['broken_parallel_build']:
        parallel_build = ""

    content = read_conf_file(os.path.join(path, "make_args"))
    if content:
        extra_make = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make32_args"))
    if content:
        extra32_make = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make_install_args"))
    if content:
        extra_make_install = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make32_install_args"))
    if content:
        extra_make32_install = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "install_macro"))
    if content and content[0]:
        install_macro = content[0]

    content = read_conf_file(os.path.join(path, "cmake_args"))
    if content:
        extra_cmake = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "cmake_srcdir"))
    if content and content[0]:
        cmake_srcdir = content[0]

    content = read_conf_file(os.path.join(path, "subdir"))
    if content and content[0]:
        subdir = content[0]

    # An explicit build_pattern override wins over detection (strength 20)
    # and disables the autoreconf stage.
    content = read_conf_file(os.path.join(path, "build_pattern"))
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        autoreconf = False

    content = read_conf_file(os.path.join(path, "make_check_command"))
    if content:
        check.tests_config = '\n'.join(content)

    # Per-package license override: whitespace-separated words on the first
    # line; anything containing ":" is skipped (presumably a URL — verify).
    content = read_conf_file(os.path.join(path, tarball.name + ".license"))
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    content = read_conf_file(os.path.join(path, "golang_libpath"))
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath : {}".format(tarball.golibpath))

    if config_opts['use_clang']:
        # funroll-loops is incompatible with the clang configuration here.
        config_opts['funroll-loops'] = False
        buildreq.add_buildreq("llvm")

    if config_opts['32bit']:
        buildreq.add_buildreq("glibc-libc32")
        buildreq.add_buildreq("glibc-dev32")
        buildreq.add_buildreq("gcc-dev32")
        buildreq.add_buildreq("gcc-libgcc32")
        buildreq.add_buildreq("gcc-libstdc++32")

    prep_prepend = read_conf_file(os.path.join(path, "prep_prepend"))
    # Migrate deprecated file names on disk to their current equivalents
    # before reading them back.
    if os.path.isfile(os.path.join(path, "prep_append")):
        os.rename(os.path.join(path, "prep_append"),
                  os.path.join(path, "build_prepend"))
    make_prepend = read_conf_file(os.path.join(path, "make_prepend"))
    build_prepend = read_conf_file(os.path.join(path, "build_prepend"))
    install_prepend = read_conf_file(os.path.join(path, "install_prepend"))
    if os.path.isfile(os.path.join(path, "make_install_append")):
        os.rename(os.path.join(path, "make_install_append"),
                  os.path.join(path, "install_append"))
    install_append = read_conf_file(os.path.join(path, "install_append"))
    profile_payload = read_conf_file(os.path.join(path, "profile_payload"))
    custom_desc = read_conf_file(os.path.join(path, "description"))
def scan_for_configure(dirn):
    """Scan the package directory for build files to determine build pattern."""
    # If a build pattern is already selected, seed the matching buildreq
    # bundle for it up front.
    pattern_bundle = {
        "distutils": "buildreq-distutils",
        "distutils23": "buildreq-distutils23",
        "distutils3": "buildreq-distutils3",
        "golang": "buildreq-golang",
        "cmake": "buildreq-cmake",
        "configure": "buildreq-configure",
        "qmake": "buildreq-qmake",
        "cpan": "buildreq-cpan",
        "scons": "buildreq-scons",
        "R": "buildreq-R",
    }
    bundle = pattern_bundle.get(buildpattern.default_pattern)
    if bundle:
        add_buildreq(bundle)

    for root, _, entries in os.walk(dirn):
        # Markers found at the top level outrank those in subdirectories.
        score = 2 if root == dirn else 1

        if any(entry.endswith(".go") for entry in entries):
            add_buildreq("buildreq-golang")
            buildpattern.set_build_pattern("golang", score)

        if "CMakeLists.txt" in entries and "configure.ac" not in entries:
            add_buildreq("buildreq-cmake")
            buildpattern.set_build_pattern("cmake", score)
            expected_src = os.path.abspath(
                os.path.join(dirn, "clr-build", config.cmake_srcdir or ".."))
            if os.path.samefile(root, expected_src):
                parse_catkin_deps(os.path.join(expected_src, "CMakeLists.txt"))

        # An executable configure script wins over qmake project files.
        if "configure" in entries and os.access(root + '/configure', os.X_OK):
            buildpattern.set_build_pattern("configure", score)
        elif any(entry.endswith(".pro") for entry in entries):
            add_buildreq("buildreq-qmake")
            buildpattern.set_build_pattern("qmake", score)

        if "requires.txt" in entries:
            grab_python_requirements(root + '/requires.txt')

        if "setup.py" in entries:
            add_buildreq("buildreq-distutils3")
            add_setup_py_requires(root + '/setup.py')
            pyver_pattern = get_python_build_version_from_classifier(root + '/setup.py')
            buildpattern.set_build_pattern(pyver_pattern, score)

        if "Makefile.PL" in entries or "Build.PL" in entries:
            buildpattern.set_build_pattern("cpan", score)
            add_buildreq("buildreq-cpan")

        if "SConstruct" in entries:
            add_buildreq("buildreq-scons")
            buildpattern.set_build_pattern("scons", score)

        if "requirements.txt" in entries:
            grab_python_requirements(root + '/requirements.txt')

        if "meson.build" in entries:
            add_buildreq("buildreq-meson")
            buildpattern.set_build_pattern("meson", score)

        for fname in entries:
            lowered = fname.lower()
            if lowered == "cargo.toml" and root == dirn:
                parse_cargo_toml(os.path.join(root, fname))
            if lowered.startswith("configure."):
                parse_configure_ac(os.path.join(root, fname))
            if lowered.startswith("rakefile") and buildpattern.default_pattern == "ruby":
                rakefile(os.path.join(root, fname))
            if fname.endswith(".pro") and buildpattern.default_pattern == "qmake":
                qmake_profile(os.path.join(root, fname))
            if lowered == "makefile":
                buildpattern.set_build_pattern("make", score)
            if lowered == "autogen.sh":
                buildpattern.set_build_pattern("autogen", score)
            if lowered == "cmakelists.txt":
                buildpattern.set_build_pattern("cmake", score)
            if (lowered == "cmakelists.txt" or fname.endswith(".cmake")) \
                    and buildpattern.default_pattern == "cmake":
                parse_cmake(os.path.join(root, fname))

    # Add the autoreconf stage only when an autotools input exists and the
    # patch scan requested it; otherwise clear the flag.
    reconf_possible = os.path.exists(os.path.join(dirn, "configure.ac")) or \
        os.path.exists(os.path.join(dirn, "configure.in"))
    if reconf_possible and config.autoreconf:
        print("Patches touch configure.*, adding autoreconf stage")
        for extra in autoreconf_reqs:
            add_buildreq(extra)
    else:
        config.autoreconf = False

    # Report the accumulated build requirements, five per line.
    shown = 0
    print("Buildreqs : ", end="")
    for item in sorted(buildreqs):
        if shown > 4:
            shown = 0
            print("\nBuildreqs : ", end="")
        shown += 1
        print(item + " ", end="")
    print("")