def test_add_license(self):
    """Verify add_license accepts a valid license string.

    The shorthand 'Apache-2' must be translated to the canonical
    'Apache-2.0' identifier and recorded in license.licenses.
    """
    added = license.add_license('Apache-2')
    self.assertTrue(added)
    self.assertIn('Apache-2.0', license.licenses)
def description_from_spec(specfile):
    """Parse an existing RPM specfile for license, summary, and description.

    Values found here are assigned with score 4. A missing specfile is
    silently ignored.
    """
    try:
        with open(specfile, 'r', encoding="latin-1") as specfd:
            lines = specfd.readlines()
    except FileNotFoundError:
        return
    # License lines containing these substrings are too ambiguous to parse.
    # Hoisted out of the loop: the list is invariant across iterations.
    excludes = ["Copyright", "see ", "("]
    specdesc = ""
    section = False
    for line in lines:
        if line.startswith("#"):
            continue
        if line.startswith("%"):
            # Any new spec section terminates the %description block.
            section = False
        if line.startswith("License:") and not any(e in line for e in excludes):
            words = ":".join(line.split(":")[1:]).strip()
            if words in config.license_translations:
                print("Adding license from spec:", words)
                license.add_license(words)
            else:
                for word in clean_license_string(words).split():
                    # Skip tokens containing ':' unless they are URLs.
                    if ":" not in word or word.startswith('http'):
                        print("Adding license from spec:", word)
                        license.add_license(word)
        if line.startswith("Summary: "):
            assign_summary(line[9:], 4)
        specdesc += line if section else ""
        # Check for %description after assigning the line to specdesc so the
        # %description string is not included
        if line.endswith("%description\n"):
            section = True
    if len(specdesc) > 10:
        assign_description(specdesc, 4)
def test_add_license(self):
    """Verify add_license translates and records a valid license string.

    'Apache-2' should map to the canonical 'Apache-2.0' identifier.
    """
    conf = config.Config("")
    conf.setup_patterns()
    result = license.add_license('Apache-2',
                                 conf.license_translations,
                                 conf.license_blacklist)
    self.assertTrue(result)
    self.assertIn('Apache-2.0', license.licenses)
def test_add_license_present(self):
    """Check add_license with a license already in the list.

    GPL-3 translates to GPL-3.0, which is pre-seeded here; the call must
    return False and leave the licenses list untouched.
    """
    license.licenses.append('GPL-3.0')
    result = license.add_license('GPL-3')
    self.assertFalse(result)
    self.assertEqual(['GPL-3.0'], license.licenses)
def description_from_pkginfo(pkginfo):
    """Parse an existing PKG-INFO file for license, summary, and description.

    Values found here are assigned with score 4. A missing file is
    silently ignored.
    """
    try:
        with open(pkginfo, 'r', encoding="latin-1") as pkgfd:
            lines = pkgfd.readlines()
    except FileNotFoundError:
        return
    # License lines containing these substrings are too ambiguous to parse.
    # Hoisted out of the loop: the list is invariant across iterations.
    excludes = ["Copyright", "see "]
    # Dedicated accumulator; the original reused the 'pkginfo' parameter,
    # shadowing the file path it was opened from.
    desc = ""
    section = False
    for line in lines:
        if ":" in line and section:
            # A new "Header:" field ends the free-form Description block.
            section = False
        if line.lower().startswith("license:") and not any(e in line for e in excludes):
            words = ":".join(line.split(":")[1:]).strip()
            if words in config.license_translations:
                print("Adding license from PKG-INFO:", words)
                license.add_license(words)
            else:
                for word in clean_license_string(words).split():
                    if ":" not in word:
                        print("Adding license from PKG-INFO:", word)
                        license.add_license(word)
        for sub in ["Summary: ", "abstract: "]:
            if line.startswith(sub):
                assign_summary(line[len(sub):].strip(), 4)
        desc += line if section else ""
        if line.startswith("Description:"):
            section = True
    if len(desc) > 10:
        assign_description(desc, 4)
def description_from_pkginfo(specfile):
    """Parse a PKG-INFO file for license, summary, and description.

    Results are stored in the module-level defaults with score 4 when they
    beat the current score.
    """
    global default_description
    global default_summary
    global default_summary_score
    global default_description_score
    specdesc = ""
    phase = 0
    # Context manager guarantees the file is closed even if parsing raises.
    with open(specfile, "r", encoding="latin-1") as pkgfd:
        for line in pkgfd.readlines():
            # A new "Header:" field ends the Description block. The original
            # tested the truthiness of line.find(":"), which is -1 (truthy)
            # when ':' is absent, wrongly ending the block on plain
            # continuation lines -- use a membership test instead.
            if ":" in line and phase == 1:
                phase = 0
            if line.lower().startswith("license:") and line.find("Copyright") < 0 and line.find("see ") < 0:
                splits = line.split(":")[1:]
                words = ":".join(splits).strip()
                if words in license.license_translations:
                    print("Adding license from PKG-INFO:", words)
                    license.add_license(words)
                else:
                    words = clean_license_string(words).split()
                    for word in words:
                        if word.find(":") < 0:
                            print("Adding license from PKG-INFO:", word)
                            license.add_license(word)
            if line.startswith("Summary: ") and default_summary_score < 4:
                default_summary = line[9:]
                default_summary_score = 4
            if line.startswith("abstract:") and default_summary_score < 4:
                default_summary = line[9:].strip()
                default_summary_score = 4
            if phase == 1:
                specdesc = specdesc + line
            if line.startswith("Description:"):
                phase = 1
    if default_description_score < 4 and len(specdesc) > 10:
        default_description = specdesc
        default_description_score = 4
def test_add_license_present(self):
    """Check add_license when the license is already recorded.

    GPL-3 translates to GPL-3.0, which is pre-seeded here. The call should
    return True and leave the licenses list unchanged.
    """
    conf = config.Config("")
    conf.setup_patterns()
    license.licenses.append('GPL-3.0')
    result = license.add_license('GPL-3',
                                 conf.license_translations,
                                 conf.license_blacklist)
    self.assertTrue(result)
    self.assertEqual(['GPL-3.0'], license.licenses)
def test_add_license_blacklisted(self):
    """Ensure add_license rejects a blacklisted string.

    The call should return False without touching the licenses list.
    """
    # sanity check: the list must start empty for the assertNotIn below to
    # be meaningful
    self.assertEqual(license.licenses, [])
    result = license.add_license('License')
    self.assertFalse(result)
    self.assertNotIn('License', license.licenses)
def description_from_spec(specfile):
    """Parse an existing RPM specfile for license, summary, and description.

    Results are stored in the module-level defaults with score 4 when they
    beat the current score.
    """
    global default_description
    global default_summary
    global default_summary_score
    global default_description_score
    specdesc = ""
    phase = 0
    # Context manager guarantees the file is closed even if parsing raises.
    with open(specfile, "r", encoding="latin-1") as specfd:
        for line in specfd.readlines():
            if line.startswith("#"):
                continue
            if line.startswith("%"):
                # Any new spec section terminates the %description block.
                phase = 0
            if line.startswith("License:") and line.find("Copyright") < 0 and line.find("see ") < 0 and line.find("(") < 0:
                splits = line.split(":")[1:]
                words = ":".join(splits).strip()
                if words in license.license_translations:
                    print("Adding license from spec:", words)
                    license.add_license(words)
                else:
                    words = clean_license_string(words).split()
                    for word in words:
                        # Skip tokens containing ':' unless they are URLs.
                        if word.find(":") < 0 or word.startswith('http'):
                            print("Adding license from spec:", word)
                            license.add_license(word)
            if line.startswith("Summary: ") and default_summary_score < 4:
                default_summary = line[9:]
                default_summary_score = 4
            if phase == 1:
                specdesc = specdesc + line
            if line.endswith("%description\n"):
                phase = 1
    # Require a minimally useful description (matches the PKG-INFO parser)
    # so an empty string cannot claim the top score and block later,
    # better-scored sources.
    if default_description_score < 4 and len(specdesc) > 10:
        default_description = specdesc
        default_description_score = 4
def test_add_license_blacklisted(self):
    """Ensure add_license rejects a string on the blacklist.

    The call must return False and leave the licenses list unmodified.
    """
    conf = config.Config("")
    conf.setup_patterns()
    # sanity check: the list must start empty for the assertNotIn below to
    # be meaningful
    self.assertEqual(license.licenses, [])
    result = license.add_license('License',
                                 conf.license_translations,
                                 conf.license_blacklist)
    self.assertFalse(result)
    self.assertNotIn('License', license.licenses)
def parse_config_files(path, bump):
    """Parse the configuration files in the package directory.

    Reads autospec.conf plus the per-package conf files under ``path``
    (creating documented empty defaults for the ban/add lists), and stores
    the results in this module's globals and in the buildreq/buildpattern/
    files/patches/tarball/test modules.

    ``bump`` increments the release number read from the "release" file.
    """
    global extra_configure
    global extra_configure32
    global config_files
    global config_path
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global urlban
    global config_file
    global profile_payload
    global config_opts
    config_path = path
    read_config_opts()

    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)
        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)
        git_uri = config["autospec"].get("git", None)
        license_fetch = config["autospec"].get("license_fetch", None)
        license_show = config["autospec"].get("license_show", None)
        urlban = config["autospec"].get("urlban", None)

    if not git_uri:
        print("Warning: Set [autospec][git] upstream template for git support")
    if not license_fetch:
        print("Warning: Set [autospec][license_fetch] uri for license fetch support")
    if not license_show:
        print("Warning: Set [autospec][license_show] uri for license link check support")

    # Wrapper used to render the default conf-file descriptions as
    # comment blocks.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        """Create a default commented conf file if it does not exist yet."""
        config_files.add(name)
        filename = path + "/" + name
        if os.path.isfile(filename):
            return
        with open(filename, "w") as f:
            f.write(wrapper.fill(description) + "\n")

    write_default_conf_file(
        "buildreq_ban",
        "This file contains build requirements that get picked up but are undesirable. One entry per line, no whitespace.",
    )
    write_default_conf_file(
        "pkgconfig_ban",
        "This file contains pkgconfig build requirements that get picked up but are undesirable. One entry per line, no whitespace.",
    )
    write_default_conf_file(
        "buildreq_add",
        "This file contains additional build requirements that did not get picked up automatically. One name per line, no whitespace.",
    )
    write_default_conf_file(
        "pkgconfig_add",
        "This file contains additional pkgconfig build requirements that did not get picked up automatically. One name per line, no whitespace.",
    )
    write_default_conf_file(
        "excludes",
        "This file contains the output files that need %exclude. Full path names, one per line."
    )

    content = read_conf_file("release")
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    content = read_conf_file("buildreq_ban")
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("pkgconfig_ban")
    for banned in content:
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("buildreq_add")
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("pkgconfig_add")
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("excludes")
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    files.excludes += content

    content = read_conf_file("extras")
    for extra in content:
        print("extras for: %s." % extra)
    files.extras += content

    content = read_conf_file("setuid")
    for suid in content:
        print("setuid for: %s." % suid)
    files.setuid += content

    content = read_conf_file("attrs")
    for line in content:
        # Raw string: "\(" in a plain string literal is an invalid escape
        # sequence (DeprecationWarning, future SyntaxError).
        attr = re.split(r"\(|\)|,", line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        files.attrs[filename] = attr

    patches.patches += read_conf_file("series")
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches.patches]
    # Raw string for the same reason as above; any patch touching the
    # autotools inputs triggers an autoreconf.
    cmd = r'egrep "(\+\+\+|\-\-\-).*((Makefile.am)|(configure.ac|configure.in))" %s' % " ".join(pfiles)
    if len(patches.patches) > 0 and call(cmd, check=False, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) == 0:
        patches.autoreconf = True

    content = read_conf_file("configure")
    extra_configure = " \\\n".join(content)
    content = read_conf_file("configure32")
    extra_configure32 = " \\\n".join(content)

    if config_opts["keepstatic"]:
        buildpattern.disable_static = ""
    if config_opts["broken_parallel_build"]:
        parallel_build = ""

    content = read_conf_file("make_args")
    if content and content[0]:
        buildpattern.extra_make = content[0]

    content = read_conf_file("make_install_args")
    if content and content[0]:
        buildpattern.extra_make_install = content[0]

    content = read_conf_file("install_macro")
    if content and content[0]:
        buildpattern.install_macro = content[0]

    content = read_conf_file("cmake_args")
    if content and content[0]:
        buildpattern.extra_cmake = content[0]

    content = read_conf_file("subdir")
    if content and content[0]:
        buildpattern.subdir = content[0]

    content = read_conf_file("build_pattern")
    if content and content[0]:
        # An explicit build pattern overrides detection and disables the
        # patch-triggered autoreconf.
        buildpattern.set_build_pattern(content[0], 20)
        patches.autoreconf = False

    content = read_conf_file("make_check_command")
    if content and content[0]:
        test.tests_config = content[0]

    content = read_conf_file(tarball.name + ".license")
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    content = read_conf_file("golang_libpath")
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath : {}".format(tarball.golibpath))

    if config_opts["use_clang"]:
        config_opts["funroll-loops"] = False
        buildreq.add_buildreq("llvm-dev")

    if config_opts["32bit"]:
        buildreq.add_buildreq("glibc-libc32")
        buildreq.add_buildreq("glibc-dev32")
        buildreq.add_buildreq("gcc-dev32")
        buildreq.add_buildreq("gcc-libgcc32")
        buildreq.add_buildreq("gcc-libstdc++32")

    buildpattern.make_install_append = read_conf_file("make_install_append")
    buildpattern.prep_append = read_conf_file("prep_append")
    profile_payload = read_conf_file("profile_payload")
def package(args, url, name, archives, workingdir, infile_dict):
    """Download, analyze, and iteratively build the package.

    Loops rebuilding (up to 20 rounds) until no restart is requested, then
    records results and optionally commits them to git.
    """
    check_requirements(args.git)
    build.setup_workingdir(workingdir)
    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(url, name, args.version, args.target, archives, filemanager)
    _dir = tarball.path
    if args.license_only:
        try:
            with open(
                    os.path.join(build.download_path,
                                 tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        # Narrowed from a bare except, which would also swallow
        # SystemExit/KeyboardInterrupt; reading the .license file is
        # best-effort only.
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)
    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)
    if args.prep_only:
        write_prep(workingdir)
        exit(0)
    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, archives)
    test.scan_for_tests(_dir)
    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)
    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)
    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)
    specfile.write_spec(build.download_path)
    while True:
        build.package(filemanager, args.mock_config, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext, tarball.name,
                                               tarball.version, tarball.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1
        if build.round > 20 or build.must_restart == 0:
            break
        save_build_log(build.download_path, build.round)
    test.check_regression(build.download_path)
    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        # Narrowed from a bare except; displaying the README is best-effort.
        except Exception:
            pass
    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)
    write_out(build.download_path + "/release", tarball.release + "\n")
    # record logcheck output
    logcheck(build.download_path)
    commitmessage.guess_commit_message(pkg_integrity.IMPORTED)
    if args.git:
        git.commit_to_git(build.download_path)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def main():
    """Parse command-line arguments and run the autospec package build.

    Command-line values override any stored metadata; the build runs in a
    temporary working directory unless --prep-only is given.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url", default="", nargs="?",
                        help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extacted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument(
        "-i", "--integrity", action="store_true", default=False,
        help="Search for package signature from source URL and "
        "attempt to verify package")
    parser.add_argument("-p", "--prep-only", action="store_true",
                        default=False,
                        help="Only perform preparatory work on package")
    parser.add_argument(
        "--non_interactive", action="store_true", default=False,
        help="Disable interactive mode for package verification")
    parser.add_argument("-C", "--cleanup", dest="cleanup",
                        action="store_true", default=False,
                        help="Clean up mock chroot after building the package")
    parser.add_argument("--infile", action="store", dest="infile",
                        default="",
                        help="type of input file for .specfile creation")
    parser.add_argument(
        "-m", "--mock-config", action="store", default="clear",
        help="Value to pass with Mock's -r option. Defaults to "
        "\"clear\", meaning that Mock will use "
        "/etc/mock/clear.cfg.")
    args = parser.parse_args()

    # Stored metadata provides defaults; command-line values win.
    name, url, archives = read_old_metadata()
    name = args.name or name
    url = args.url or url
    archives = args.archives or archives

    infile_dict = {}
    if args.infile:
        infile_dict = infile_handler.infile_reader(args.infile, name)
        if not url:
            try:
                url = infile_dict.get('URL')
            # Narrowed from a bare except, which would also swallow
            # SystemExit/KeyboardInterrupt; a malformed infile result just
            # falls through to the missing-url error below.
            except Exception:
                pass
            else:
                print_infile("Source url found: {}".format(url))
        if infile_dict.get("LICENSE"):
            license.add_license(infile_dict.get("LICENSE"))
            print_infile("License added: {}".format(
                infile_dict.get("LICENSE")))

    if not url:
        parser.error(
            argparse.ArgumentTypeError(
                "the url argument or options.conf['package']['url'] is required"
            ))
    # Archive arguments come in (url, destination) pairs.
    if len(archives) % 2 != 0:
        parser.error(
            argparse.ArgumentTypeError(
                "-a/--archives or options.conf['package']['archives'] requires an "
                "even number of arguments"))

    if args.prep_only:
        package(args, url, name, archives, "./workingdir", infile_dict)
    else:
        with tempfile.TemporaryDirectory() as workingdir:
            package(args, url, name, archives, workingdir, infile_dict)
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    conf = config.Config()
    check_requirements(args.git)
    # NOTE(review): this local rebinds the name 'package' (this function);
    # every 'package.*' reference below acts on the Build instance, not on
    # this function.
    package = build.Build(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(package.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        # Best-effort read of an existing .license file; missing or
        # unreadable files are ignored.
        try:
            with open(
                    os.path.join(package.download_path,
                                 content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(package.download_path, args.bump, filemanager, content, requirements)
    # Re-run pattern setup once the configured failed_pattern_dir is known.
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(package.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name, package.download_path, conf)
    specdescription.scan_for_description(content.name, _dir, conf.license_translations, conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(package.download_path, _dir, conf.transforms)
    add_sources(package.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name, content.release, conf, requirements, content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict, args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, package.download_path, conf, interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(package.download_path)
    # Build loop: keep rebuilding until nothing requests a restart or 20
    # rounds have passed.
    while 1:
        package.package(filemanager, args.mock_config, args.mock_opts, conf, requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(package.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(package.uniqueext,
                                               content.name,
                                               content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
        if package.round > 20 or package.must_restart == 0:
            break
        # Preserve this round's mock logs before the next rebuild.
        save_mock_logs(package.download_path, package.round)

    check.check_regression(package.download_path, conf.config_opts['skip_tests'])

    if package.success == 0:
        # Cache the discovered buildreqs even on failure so the next run
        # can start from them.
        conf.create_buildreq_cache(package.download_path, content.version, requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        # Best-effort display of the package's README.clear.
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(package.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(package.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(package.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content, package)
    conf.create_buildreq_cache(package.download_path, content.version, requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(package.download_path, conf, content.name, package.success)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def main():
    """ Main function for autospec """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extacted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument(
        "-i", "--integrity", action="store_true", default=False,
        help="Search for package signature from source URL and "
        "attempt to verify package")
    parser.add_argument(
        "--non_interactive", action="store_true", default=False,
        help="Disable interactive mode for package verification")
    args = parser.parse_args()
    # Archive arguments come in (url, destination) pairs.
    if len(args.archives) % 2 != 0:
        parser.error(
            argparse.ArgumentTypeError(
                "-a/--archives requires an even number of arguments"))
    check_requirements(args.git)
    # NOTE(review): 'workingdir' is not defined in this function; presumably
    # a module-level global -- confirm, otherwise this raises NameError at
    # runtime.
    build.setup_workingdir(workingdir)
    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(args.url, args.name, args.version, args.target,
                    args.archives, filemanager)
    _dir = tarball.path
    if args.license_only:
        try:
            with open(
                    os.path.join(build.download_path,
                                 tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        # NOTE(review): bare except also swallows SystemExit and
        # KeyboardInterrupt; reading the .license file is best-effort.
        except:
            pass
        license.scan_for_licenses(_dir)
        exit(0)
    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)
    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)
    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)
    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(args.url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)
    specfile.write_spec(build.download_path)
    # Build loop: keep rebuilding until nothing requests a restart or 20
    # rounds have passed.
    while 1:
        build.package(filemanager)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break
    test.check_regression(build.download_path)
    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        # Best-effort display of the package's README.clear.
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        # NOTE(review): bare except -- same concern as above.
        except:
            pass
    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)
    write_out(build.download_path + "/release", tarball.release + "\n")
    # record logcheck output
    logcheck(build.download_path)
    commitmessage.guess_commit_message()
    if args.git:
        git.commit_to_git(build.download_path)
def parse_config_files(self, path, bump, filemanager, version, requirements):
    """Parse the various configuration files that may exist in the package directory.

    Reads autospec.conf (if present), options.conf, and the many per-package
    control files under ``path`` (bans/adds for build and runtime requirements,
    excludes/extras, patch series, configure/make/cmake argument files, script
    prepend/append hooks, ...), populating ``self``, ``filemanager`` and
    ``requirements`` as a side effect.

    Args:
        path: package directory containing the per-package config files.
        bump: when truthy, increment the release number read from "release".
        filemanager: FileManager instance; receives excludes/extras/attrs/etc.
        version: package version string; used to validate the buildreq cache.
        requirements: Requirements instance; receives build/runtime req changes.

    Side effects beyond self/arguments: sets tarball.release and
    tarball.golibpath, may set check.tests_config, may call
    buildpattern.set_build_pattern, and may rename/create files under path.
    """
    packages_file = None
    # Require autospec.conf for additional features
    if os.path.exists(self.config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(self.config_file)

        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)

        self.git_uri = config['autospec'].get('git', None)
        self.license_fetch = config['autospec'].get('license_fetch', None)
        self.license_show = config['autospec'].get('license_show', None)
        packages_file = config['autospec'].get('packages_file', None)
        self.yum_conf = config['autospec'].get('yum_conf', None)
        self.failed_pattern_dir = config['autospec'].get(
            'failed_pattern_dir', None)

        # support reading the local files relative to config_file
        if packages_file and not os.path.isabs(packages_file):
            packages_file = os.path.join(os.path.dirname(self.config_file),
                                         packages_file)
        if self.yum_conf and not os.path.isabs(self.yum_conf):
            self.yum_conf = os.path.join(os.path.dirname(self.config_file),
                                         self.yum_conf)
        if self.failed_pattern_dir and not os.path.isabs(
                self.failed_pattern_dir):
            self.failed_pattern_dir = os.path.join(
                os.path.dirname(self.config_file), self.failed_pattern_dir)

        if not packages_file:
            print(
                "Warning: Set [autospec][packages_file] path to package list file for "
                "requires validation")
            # fall back to a "packages" file next to autospec.conf
            packages_file = os.path.join(os.path.dirname(self.config_file),
                                         "packages")

        self.urlban = config['autospec'].get('urlban', None)

    # Read values from options.conf (and deprecated files) and rewrite as necessary
    self.read_config_opts(path)

    # Warn (but continue) for each optional autospec.conf feature not configured.
    if not self.git_uri:
        print(
            "Warning: Set [autospec][git] upstream template for remote git URI configuration"
        )
    if not self.license_fetch:
        print(
            "Warning: Set [autospec][license_fetch] uri for license fetch support"
        )
    if not self.license_show:
        print(
            "Warning: Set [autospec][license_show] uri for license link check support"
        )
    if not self.yum_conf:
        print(
            "Warning: Set [autospec][yum_conf] path to yum.conf file for whatrequires validation"
        )
        self.yum_conf = os.path.join(os.path.dirname(self.config_file),
                                     "image-creator/yum.conf")

    # Known distro packages, used later to validate added requirements.
    if packages_file:
        self.os_packages = set(
            self.read_conf_file(packages_file, track=False))
    else:
        self.os_packages = set(
            self.read_conf_file("~/packages", track=False))

    # Wrapper that renders the default-file descriptions as "# " comments.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    # Create each control file with an explanatory comment if it is missing.
    self.write_default_conf_file(
        path, "buildreq_ban", wrapper,
        "This file contains build requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    self.write_default_conf_file(
        path, "pkgconfig_ban", wrapper,
        "This file contains pkgconfig build requirements that get picked up but"
        " are undesirable. One entry per line, no whitespace.")
    self.write_default_conf_file(
        path, "requires_ban", wrapper,
        "This file contains runtime requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    self.write_default_conf_file(
        path, "buildreq_add", wrapper,
        "This file contains additional build requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    self.write_default_conf_file(
        path, "pkgconfig_add", wrapper,
        "This file contains additional pkgconfig build requirements that did "
        "not get picked up automatically. One name per line, no whitespace."
    )
    self.write_default_conf_file(
        path, "requires_add", wrapper,
        "This file contains additional runtime requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    self.write_default_conf_file(
        path, "excludes", wrapper,
        "This file contains the output files that need %exclude. Full path "
        "names, one per line.")

    # Release number; optionally bumped for a new build.
    content = self.read_conf_file(os.path.join(path, "release"))
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    # Extra source files: "<file> [<dest>]" per line.
    content = self.read_conf_file(os.path.join(path, "extra_sources"))
    for source in content:
        fields = source.split(maxsplit=1)
        print("Adding additional source file: %s" % fields[0])
        self.config_files.add(os.path.basename(fields[0]))
        self.extra_sources.append(fields)

    # Banned build requirements: remove from current and cached sets too.
    content = self.read_conf_file(os.path.join(path, "buildreq_ban"))
    for banned in content:
        print("Banning build requirement: %s." % banned)
        requirements.banned_buildreqs.add(banned)
        requirements.buildreqs.discard(banned)
        requirements.buildreqs_cache.discard(banned)

    content = self.read_conf_file(os.path.join(path, "pkgconfig_ban"))
    for banned in content:
        # pkgconfig bans are stored in their rpm "pkgconfig(...)" form
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        requirements.banned_buildreqs.add(banned)
        requirements.buildreqs.discard(banned)
        requirements.buildreqs_cache.discard(banned)

    content = self.read_conf_file(os.path.join(path, "requires_ban"))
    for banned in content:
        print("Banning runtime requirement: %s." % banned)
        requirements.banned_requires.add(banned)
        requirements.requires.discard(banned)

    content = self.read_conf_file(os.path.join(path, "buildreq_add"))
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        requirements.add_buildreq(extra)

    # buildreq_cache is only honored when its first line matches the current
    # version; otherwise it is stale and gets deleted.
    cache_file = os.path.join(path, "buildreq_cache")
    content = self.read_conf_file(cache_file)
    if content and content[0] == version:
        for extra in content[1:]:
            print("Adding additional build (cache) requirement: %s." % extra)
            requirements.add_buildreq(extra)
    else:
        try:
            os.unlink(cache_file)
        except FileNotFoundError:
            pass
        except Exception as e:
            print_warning(f"Unable to remove buildreq_cache file: {e}")

    content = self.read_conf_file(os.path.join(path, "pkgconfig_add"))
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        requirements.add_buildreq(extra)

    content = self.read_conf_file(os.path.join(path, "requires_add"))
    for extra in content:
        print("Adding additional runtime requirement: %s." % extra)
        requirements.add_requires(extra, self.os_packages, override=True)

    content = self.read_conf_file(os.path.join(path, "excludes"))
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    filemanager.excludes += content

    content = self.read_conf_file(os.path.join(path, "extras"))
    for extra in content:
        print("extras for : %s." % extra)
    filemanager.extras += content

    # Custom "<name>_extras" files (plus optional "<name>_extras_requires")
    # define named extras subpackages.
    for fname in os.listdir(path):
        if not re.search('.+_extras$', fname) or fname == "dev_extras":
            continue
        content = {}
        content['files'] = self.read_conf_file(os.path.join(path, fname))
        # NOTE(review): content always has a 'files' key here, so this check
        # never fires; presumably `content['files']` was intended — confirm.
        if not content:
            print_warning(f"Error reading custom extras file: {fname}")
            continue
        req_file = os.path.join(path, f'{fname}_requires')
        if os.path.isfile(req_file):
            content['requires'] = self.read_conf_file(req_file)
        name = fname[:-len("_extras")]
        print(f"extras-{name} for {content['files']}")
        filemanager.custom_extras["extras-" + f"{name}"] = content

    content = self.read_conf_file(os.path.join(path, "dev_extras"))
    for extra in content:
        print("dev for : %s." % extra)
    filemanager.dev_extras += content

    content = self.read_conf_file(os.path.join(path, "setuid"))
    for suid in content:
        print("setuid for : %s." % suid)
    filemanager.setuid += content

    # attrs lines: "<mode> <user> <group> <filename>"
    content = self.read_conf_file(os.path.join(path, "attrs"))
    for line in content:
        attr = line.split()
        filename = attr.pop()
        print("%attr({0},{1},{2}) for: {3}".format(attr[0], attr[1], attr[2],
                                                   filename))
        filemanager.attrs[filename] = attr

    # Patch series; if any patch touches the autotools inputs, we must
    # autoreconf before building.
    self.patches += self.read_conf_file(os.path.join(path, "series"))
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in self.patches]
    cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am)|(aclocal.m4)|(configure.ac|configure.in))\" %s" % " ".join(
        pfiles)  # noqa: W605
    if self.patches and call(cmd,
                             check=False,
                             stdout=subprocess.DEVNULL,
                             stderr=subprocess.DEVNULL) == 0:
        self.autoreconf = True

    # Parse the version-specific patch lists
    update_security_sensitive = False
    for version in self.versions:
        self.verpatches[version] = self.read_conf_file(
            os.path.join(path, '.'.join(['series', version])))
        if any(p.lower().startswith('cve-')
               for p in self.verpatches[version]):
            update_security_sensitive = True

    if any(p.lower().startswith('cve-') for p in self.patches):
        update_security_sensitive = True

    # Any CVE-named patch marks the package security sensitive (persisted).
    if update_security_sensitive:
        self.config_opts['security_sensitive'] = True
        self.rewrite_config_opts(path)

    # Extra flags files; lines are joined with " \\\n" for spec readability.
    content = self.read_conf_file(os.path.join(path, "configure"))
    self.extra_configure = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure32"))
    self.extra_configure32 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure64"))
    self.extra_configure64 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure_avx2"))
    self.extra_configure_avx2 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure_avx512"))
    self.extra_configure_avx512 = " \\\n".join(content)
    content = self.read_conf_file(os.path.join(path, "configure_openmpi"))
    self.extra_configure_openmpi = " \\\n".join(content)

    if self.config_opts["keepstatic"]:
        self.disable_static = ""
    if self.config_opts['broken_parallel_build']:
        self.parallel_build = ""

    content = self.read_conf_file(os.path.join(path, "make_args"))
    if content:
        self.extra_make = " \\\n".join(content)

    content = self.read_conf_file(os.path.join(path, "make32_args"))
    if content:
        self.extra32_make = " \\\n".join(content)

    content = self.read_conf_file(os.path.join(path, "make_install_args"))
    if content:
        self.extra_make_install = " \\\n".join(content)

    content = self.read_conf_file(os.path.join(path, "make32_install_args"))
    if content:
        self.extra_make32_install = " \\\n".join(content)

    content = self.read_conf_file(os.path.join(path, "install_macro"))
    if content and content[0]:
        self.install_macro = content[0]

    content = self.read_conf_file(os.path.join(path, "cmake_args"))
    if content:
        self.extra_cmake = " \\\n".join(content)

    content = self.read_conf_file(os.path.join(path, "cmake_args_openmpi"))
    if content:
        self.extra_cmake_openmpi = " \\\n".join(content)

    content = self.read_conf_file(os.path.join(path, "cmake_srcdir"))
    if content and content[0]:
        self.cmake_srcdir = content[0]

    content = self.read_conf_file(os.path.join(path, "subdir"))
    if content and content[0]:
        self.subdir = content[0]

    # An explicit build_pattern overrides detection (weight 20) and disables
    # the autoreconf decision made above.
    content = self.read_conf_file(os.path.join(path, "build_pattern"))
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        self.autoreconf = False

    content = self.read_script_file(
        os.path.join(path, "make_check_command"))
    if content:
        check.tests_config = '\n'.join(content)

    # Licenses from <name>.license: whitespace separated, tokens containing
    # ":" are skipped (they look like URLs/labels, not license names).
    content = self.read_conf_file(
        os.path.join(path, tarball.name + ".license"))
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                if not license.add_license(word, self.license_translations,
                                           self.license_blacklist):
                    print_warning(
                        "{}: blacklisted license {} ignored.".format(
                            tarball.name + ".license", word))

    content = self.read_conf_file(os.path.join(path, "golang_libpath"))
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath : {}".format(tarball.golibpath))

    # Option-driven extra build requirements.
    if self.config_opts['use_clang']:
        self.config_opts['funroll-loops'] = False
        requirements.add_buildreq("llvm")

    if self.config_opts['32bit']:
        requirements.add_buildreq("glibc-libc32")
        requirements.add_buildreq("glibc-dev32")
        requirements.add_buildreq("gcc-dev32")
        requirements.add_buildreq("gcc-libgcc32")
        requirements.add_buildreq("gcc-libstdc++32")

    if self.config_opts['openmpi']:
        requirements.add_buildreq("openmpi-dev")
        requirements.add_buildreq("modules")
        # MPI testsuites generally require "openssh"
        requirements.add_buildreq("openssh")

    # Script hook files; the two os.rename calls migrate deprecated file
    # names (prep_append -> build_prepend, make_install_append -> install_append).
    self.prep_prepend = self.read_script_file(
        os.path.join(path, "prep_prepend"))
    if os.path.isfile(os.path.join(path, "prep_append")):
        os.rename(os.path.join(path, "prep_append"),
                  os.path.join(path, "build_prepend"))
    self.make_prepend = self.read_script_file(
        os.path.join(path, "make_prepend"))
    self.build_prepend = self.read_script_file(
        os.path.join(path, "build_prepend"))
    self.build_append = self.read_script_file(
        os.path.join(path, "build_append"))
    self.install_prepend = self.read_script_file(
        os.path.join(path, "install_prepend"))
    if os.path.isfile(os.path.join(path, "make_install_append")):
        os.rename(os.path.join(path, "make_install_append"),
                  os.path.join(path, "install_append"))
    self.install_append = self.read_script_file(
        os.path.join(path, "install_append"))
    self.service_restart = self.read_conf_file(
        os.path.join(path, "service_restart"))
    self.profile_payload = self.read_script_file(
        os.path.join(path, "profile_payload"))
    self.custom_desc = self.read_conf_file(
        os.path.join(path, "description"))
    self.custom_summ = self.read_conf_file(os.path.join(path, "summary"))
def parse_config_files(path, bump):
    """Parse the configuration files that may exist in the package directory.

    Legacy module-level variant: reads autospec.conf plus the per-package
    control files under ``path`` and stores the results in module globals and
    in the buildreq/files/patches/buildpattern/test/license modules.

    Args:
        path: package directory containing the per-package config files.
        bump: when truthy, increment the release number read from "release".

    Fix vs. original: the regex patterns passed to re.split and built into the
    egrep command were plain string literals containing invalid escape
    sequences (``\\(``, ``\\+``, ``\\-``); they are now raw strings, which is
    byte-identical at runtime but avoids the DeprecationWarning (a future
    SyntaxError) for unrecognized escapes.
    """
    global extra_configure
    global keepstatic
    global asneeded
    global optimize_size
    global optimize_speed
    global insecure_build
    global config_files
    global config_path
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global config_file
    config_path = path

    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)

        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)

        git_uri = config['autospec'].get('git', None)
        license_fetch = config['autospec'].get('license_fetch', None)
        license_show = config['autospec'].get('license_show', None)

    # Warn (but continue) for each optional feature not configured.
    if not git_uri:
        print("Warning: Set [autospec][git] upstream template for git support")
    if not license_fetch:
        print("Warning: Set [autospec][license_fetch] uri for license fetch support")
    if not license_show:
        print("Warning: Set [autospec][license_show] uri for license link check support")

    # Wrapper that renders the default-file descriptions as "# " comments.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        """Create conf file ``name`` with a commented description if missing."""
        config_files.add(name)
        filename = path + "/" + name
        if os.path.isfile(filename):
            return
        with open(filename, "w") as f:
            f.write(wrapper.fill(description) + "\n")

    write_default_conf_file("buildreq_ban", "This file contains build requirements that get picked up but are undesirable. One entry per line, no whitespace.")
    write_default_conf_file("pkgconfig_ban", "This file contains pkgconfig build requirements that get picked up but are undesirable. One entry per line, no whitespace.")
    write_default_conf_file("buildreq_add", "This file contains additional build requirements that did not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file("pkgconfig_add", "This file contains additional pkgconfig build requirements that did not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file("excludes", "This file contains the output files that need %exclude. Full path names, one per line.")

    # Release number; optionally bumped for a new build.
    content = read_conf_file("release")
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    content = read_conf_file("buildreq_ban")
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("pkgconfig_ban")
    for banned in content:
        # pkgconfig bans are stored in their rpm "pkgconfig(...)" form
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file("buildreq_add")
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("pkgconfig_add")
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file("excludes")
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    files.excludes += content

    content = read_conf_file("extras")
    for extra in content:
        print("extras for: %s." % extra)
    files.extras += content

    content = read_conf_file("setuid")
    for suid in content:
        print("setuid for: %s." % suid)
    files.setuid += content

    # attrs lines look like "%attr(mode, user, group) filename"; split on the
    # parentheses/commas, the last field is the filename.
    content = read_conf_file("attrs")
    for line in content:
        attr = re.split(r'\(|\)|,', line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        files.attrs[filename] = attr

    # Patch series; if any patch touches the autotools inputs, we must
    # autoreconf before building.
    patches.patches += read_conf_file("series")
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches.patches]
    cmd = r'egrep "(\+\+\+|\-\-\-).*((Makefile.am|Makefile.in)|(configure.ac|configure.in))" %s' % \
        " ".join(pfiles)
    if len(patches.patches) > 0 and call(cmd, check=False, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) == 0:
        patches.autoreconf = True

    content = read_conf_file("configure")
    extra_configure = " \\\n".join(content)

    # Boolean toggle files: presence of any content flips the option.
    if read_conf_file("keepstatic"):
        keepstatic = 1
        buildpattern.disable_static = ""
    if read_conf_file("asneeded"):
        print("Disabling LD_AS_NEEDED\n")
        asneeded = 0
    if read_conf_file("optimize_size"):
        optimize_size = True
    if read_conf_file("funroll-loops"):
        optimize_speed = True
    if read_conf_file("insecure_build"):
        insecure_build = True
    if read_conf_file("broken_parallel_build"):
        parallel_build = ""

    content = read_conf_file("make_args")
    if content and content[0]:
        buildpattern.extra_make = content[0]

    content = read_conf_file("make_install_args")
    if content and content[0]:
        buildpattern.extra_make_install = content[0]

    content = read_conf_file("install_macro")
    if content and content[0]:
        buildpattern.install_macro = content[0]

    content = read_conf_file("cmake_args")
    if content and content[0]:
        buildpattern.extra_cmake = content[0]

    content = read_conf_file("subdir")
    if content and content[0]:
        buildpattern.subdir = content[0]

    # An explicit build_pattern overrides detection (weight 20) and disables
    # the autoreconf decision made above.
    content = read_conf_file("build_pattern")
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        patches.autoreconf = False

    if read_conf_file("skip_test_suite"):
        test.skip_tests = True
    if read_conf_file("unit_tests_must_pass"):
        test.new_pkg = False

    content = read_conf_file("make_check_command")
    if content and content[0]:
        test.tests_config = content[0]

    content = read_conf_file("allow_test_failures")
    if content and content[0]:
        test.allow_test_failures = True

    # Licenses from <name>.license: whitespace separated, tokens containing
    # ":" are skipped (they look like URLs/labels, not license names).
    content = read_conf_file(tarball.name + ".license")
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    buildpattern.make_install_append = read_conf_file("make_install_append")
def main():
    """Command-line entry point: download a tarball, analyze it, and build it.

    Parses arguments, downloads and scans the source, writes a specfile, then
    loops on build attempts until the build stops requesting restarts or 20
    rounds elapse. Exits the process directly for --license-only mode.

    Fixes vs. original:
      * bare ``except:`` narrowed to ``except Exception:`` (consistent with
        the newer driver in this file);
      * the three scan calls referenced an undefined local ``name`` (a
        NameError at runtime) — they now use ``tarball.name``, which
        ``tarball.download_tarball`` establishes, matching the newer driver;
      * local ``dir`` renamed to ``srcdir`` to stop shadowing the builtin;
      * "extacted" typo fixed in user-facing help text.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git",
                        action="store_false", dest="git", default=True,
                        help="Don't commit result to git")
    parser.add_argument("-n", "--name", nargs=1,
                        action="store", dest="name", default="",
                        help="Override the package name")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store",
                        dest="archives", default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only",
                        action="store_true", dest="license_only",
                        default=False, help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", nargs=1, dest="config",
                        action="store", default="common/autospec.conf",
                        help="Set configuration file to use")
    args = parser.parse_args()

    # Archives are given as (url, destination) pairs, so an odd count is an error.
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    build.setup_patterns()
    tarball.download_tarball(args.url, args.name, args.archives)
    srcdir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                      tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    # tokens with ":" look like URLs/labels, not license names
                    if word.find(":") < 0:
                        license.add_license(word)
        except Exception:
            # best-effort: a missing/unreadable .license file is not fatal
            pass
        license.scan_for_licenses(tarball.name, srcdir)
        exit(0)

    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump)

    buildreq.scan_for_configure(tarball.name, srcdir, build.download_path)
    specdescription.scan_for_description(tarball.name, srcdir)
    license.scan_for_licenses(tarball.name, srcdir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(srcdir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    write_spec(build.download_path + "/" + tarball.name + ".spec")
    print("\n")
    while 1:
        build.package()
        write_spec(build.download_path + "/" + tarball.name + ".spec")
        files.newfiles_printed = 0
        # stop when the build no longer asks for a restart, or after 20 rounds
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print("Build failed")
        return

    with open(build.download_path + "/release", "w") as fp:
        fp.write(tarball.release + "\n")

    if args.git:
        git.commit_to_git(build.download_path)
def package(
        args,
        url,
        name,
        archives,
        archives_from_git,
        workingdir,
        download_from_git,
        branch,
        redownload_from_git,
        redownload_archive,
        force_module,
        force_fullclone,
        mock_dir,
        short_circuit,
        do_file_restart,
):
    """Entry point for building a package with autospec.

    Resolves the main source (optionally via git archive) and any
    git-sourced extra archives, runs the static analysis scans, writes a
    specfile, then loops on mock builds until no restart is requested (or 20
    rounds elapse), finishing with post-build bookkeeping that depends on the
    ``short_circuit`` stage (None, "prep", "build", "install", "binary").

    Fix vs. original: in the two main-tarball rescan loops the cached-archive
    path was built as ``f"{package_path}(unknown)"`` — a literal placeholder
    instead of the matched filename. Both now interpolate ``filename``,
    matching the four parallel archive-scan loops below that use
    ``f"{package_path}{filename.name}"``.
    """
    conf = config.Config(args.target)
    conf.parse_config_files_early()

    if util.debugging:
        print_debug(f"url 1: {url}")
    new_archives_from_git = []
    name_re_escaped = re.escape(name)

    # Download the source from git if necessary
    if download_from_git:
        giturl = url
        found_file = False
        fileslist = None
        download_file_full_path = ""
        if util.debugging:
            print_debug(f"url 2: {url}")
            print_debug(f"BRANCH 2: {branch}")
        # filename_re = re.compile(r"^{}{}".format(name, r"(-|-.)(\d+)(\.\d+)+\.tar\.gz"))
        filename_re = re.compile(r"^{}{}".format(name_re_escaped, r"-.*\.tar\.gz"))
        if os.path.basename(os.getcwd()) == name:
            # Running from inside the package directory: look for an already
            # generated tarball (newest last) before re-archiving from git.
            package_path = "./"
            if util.debugging:
                print_debug(f"package_path 11: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=os.path.getmtime)
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(
                        os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(
                            f"found old package_path 21: {download_file_full_path}"
                        )
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(
                    f"download_file_full_path 11: {download_file_full_path}")
                print_debug(f"giturl 11: {giturl}")
        else:
            # Running from a tree root: the package lives under packages/<name>.
            package_path = f"packages/{name}"
            if util.debugging:
                print_debug(f"package_path 12: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=os.path.getmtime)
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(
                        os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(
                            f"found old package_path 22: {download_file_full_path}"
                        )
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(
                    f"download_file_full_path 12: {download_file_full_path}")
                print_debug(f"giturl 12: {giturl}")
    else:
        giturl = ""
        url = download.do_curl_get_effective_url(url)

    if archives_from_git:
        arch_url = []
        arch_destination = []
        arch_branch = []
        arch_submodule = []
        arch_forcefullclone = []
        if util.debugging:
            print_debug(f"ARCHIVES_GIT 2: {archives_from_git}")
            print_debug(f"archives in options.conf: {archives}\n\n")
        # Strip any stale "file://..." entries (and their destination, stored
        # as the following element) from the archives list before re-adding.
        archives_re = re.compile(r"^file:\/\/")
        index_f = []
        for index, url_entry in enumerate(archives):
            if archives_re.search(url_entry):
                index_f.append(index)
        if util.debugging:
            for x in range(len(index_f) - 1, -1, -1):
                print_debug(
                    f"rm {index_f[x]}:{archives[index_f[x]]} {index_f[x] + 1}:{archives[index_f[x] + 1]}"
                )
        # delete from the end so earlier indices stay valid
        for x in sorted(range(len(index_f) - 1, -1, -1), reverse=True):
            del archives[index_f[x]:index_f[x] + 2]
        if util.debugging:
            print_debug(f"archives in options.conf: {archives}")

        # archives_from_git is a flat list of 5-tuples:
        # (url, destination, branch, submodule-flag, force-fullclone-flag)
        for aurl, dest, br, sm, ffc in zip(archives_from_git[::5],
                                           archives_from_git[1::5],
                                           archives_from_git[2::5],
                                           archives_from_git[3::5],
                                           archives_from_git[4::5]):
            arch_url.append(aurl)
            arch_destination.append(dest)
            arch_branch.append(br)
            arch_submodule.append(sm)
            arch_forcefullclone.append(ffc)
            if util.debugging:
                print_debug(
                    f"FOR ZIP {arch_url[-1]} - {arch_destination[-1]} - {arch_branch[-1]} - {arch_submodule[-1]} - {arch_forcefullclone[-1]}"
                )
        for index, new_arch_url in enumerate(arch_url, start=0):
            found_file = False
            fileslist = []
            download_file_full_path = ""
            arch_name = os.path.splitext(os.path.basename(new_arch_url))[0]
            arch_name_re_escaped = re.escape(arch_name)
            filename_re = re.compile(r"^{}{}".format(arch_name_re_escaped,
                                                     r"-.*\.tar\.gz"))
            if util.debugging:
                print_debug(f"arch_name: {arch_name}")
            if os.path.basename(os.getcwd()) == name:
                package_path = "./"
                if util.debugging:
                    print_debug(f"archive package_path 1: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file():
                        if filename_re.search(filename.name):
                            found_file = True
                            download_file_full_path = "file://{}".format(
                                os.path.abspath(
                                    f"{package_path}{filename.name}"))
                            if util.debugging:
                                print_debug(f"filename: {filename.name}")
                                print_debug(f"Index: {index}")
                                print_debug(
                                    f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}"
                                )
                                print_debug(
                                    f"archive found 1: {arch_name} - {download_file_full_path}"
                                )
                            break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(
                            f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}"
                        )
                        print_debug(
                            f"Fazer download archive 1: {arch_name} - {new_arch_url}"
                        )
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(
                            arch_forcefullclone[index]),
                        conf=conf)
                if util.debugging:
                    print_debug(
                        f"archive download_file_full_path 1: {download_file_full_path}"
                    )
                if download_file_full_path in archives or arch_destination[
                        index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
            else:
                package_path = f"packages/{name}"
                if util.debugging:
                    print_debug(f"archive package_path 2: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file():
                        if filename_re.search(filename.name):
                            found_file = True
                            download_file_full_path = "file://{}".format(
                                os.path.abspath(
                                    f"{package_path}{filename.name}"))
                            if util.debugging:
                                print_debug(f"Index: {index}")
                                print_debug(
                                    f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}"
                                )
                                print_debug(
                                    f"archive found 2: {arch_name} - {download_file_full_path}"
                                )
                            break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(
                            f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}"
                        )
                        print_debug(
                            f"Fazer download archive 2: {arch_name} - {new_arch_url}"
                        )
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(
                            arch_forcefullclone[index]),
                        conf=conf)
                if util.debugging:
                    print_debug(
                        f"archive download_file_full_path 2: {download_file_full_path}"
                    )
                if download_file_full_path in archives or arch_destination[
                        index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
        if util.debugging:
            print_debug(f"new_archives_from_git: {new_archives_from_git}\n")

    #check_requirements(args.git)
    conf.detect_build_from_url(url)
    # NOTE: this local deliberately (or accidentally) shadows the function name.
    package = build.Build()

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package, mock_dir, short_circuit)

    if util.debugging:
        print_debug(f"url 4: {url}")
        print_debug(f"archives 4: {archives}")
        print_debug(f"new_archives_from_git 4: {new_archives_from_git}")
    content = tarball.Content(url, name, args.version, archives, conf,
                              workingdir, giturl, download_from_git, branch,
                              new_archives_from_git, force_module,
                              force_fullclone)
    content.process(filemanager)
    conf.create_versions(content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req(conf)
    conf.parse_config_files(args.bump, filemanager, content.version,
                            requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    if args.license_only:
        try:
            with open(
                    os.path.join(conf.download_path,
                                 content.name + ".license"),
                    "r",
            ) as dotlic:
                for word in dotlic.read().split():
                    # tokens with ":" look like URLs/labels, not license names
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            # best-effort: a missing/unreadable .license file is not fatal
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, name)
        exit(0)

    if short_circuit == "prep" or short_circuit is None:
        requirements.scan_for_configure(_dir, content.name, conf)
        specdescription.scan_for_description(content.name, _dir,
                                             conf.license_translations,
                                             conf.license_blacklist)
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
        commitmessage.scan_for_changes(conf.download_path, _dir,
                                       conf.transforms)
        conf.add_sources(archives, content)
        check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content, mock_dir, short_circuit)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, conf, interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    if short_circuit == "prep" or short_circuit is None:
        conf.create_buildreq_cache(content.version,
                                   requirements.buildreqs_cache)
        # conf.create_reqs_cache(content.version, requirements.reqs_cache)

    specfile.write_spec()
    filemanager.load_specfile_information(specfile, content)
    # Clear stale mock build products for the stage being (re)run.
    if short_circuit == "prep":
        util.call(
            f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/SRPMS/"
        )
        util.call(
            f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/BUILD/"
        )
    if short_circuit == "install":
        util.call(
            f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/RPMS/"
        )

    while 1:
        package.package(
            filemanager,
            args.mock_config,
            args.mock_opts,
            conf,
            requirements,
            content,
            mock_dir,
            short_circuit,
            do_file_restart,
            args.cleanup,
        )
        # The build step may advance the short-circuit stage itself.
        if (short_circuit != package.short_circuit):
            print_info(f"short_circuit: {short_circuit}")
            print_info(f"package.short_circuit: {package.short_circuit}")
            short_circuit = package.short_circuit
            print_info(f"new short_circuit: {short_circuit}")
        filemanager.load_specfile_information(specfile, content)
        filemanager.load_specfile(specfile)
        specfile.write_spec()
        filemanager.newfiles_printed = 0
        mock_chroot = f"{mock_dir}/clear-{package.uniqueext}/root/builddir/build/BUILDROOT/{content.name}-{content.version}-{content.release}.x86_64"
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
            print_info(f"filemanager.clean_directories({mock_chroot})")

        if do_file_restart:
            if package.round > 20 or (package.must_restart == 0
                                      and package.file_restart == 0):
                if (short_circuit == "install"):
                    # install finished cleanly: advance to the binary stage
                    print_info(f"short_circuit: {short_circuit}")
                    print_info(
                        f"package.short_circuit: {package.short_circuit}")
                    short_circuit = "binary"
                    print_info(f"new short_circuit: {short_circuit}")
                    continue
                else:
                    break
        else:
            if (package.round > 20 or package.must_restart == 0):
                break

        save_mock_logs(conf.download_path, package.round)

    #if short_circuit is None or short_circuit == "install":
        #check.check_regression(conf.download_path, conf.config_opts["skip_tests"])

    #conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
    #conf.create_reqs_cache(content.version, requirements.reqs_cache)

    if package.success == 0:
        #conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif (package.success == 1):
        if os.path.isfile("README.clear"):
            try:
                print("\nREADME.clear CONTENTS")
                print("*********************")
                with open("README.clear", "r") as readme_f:
                    print(readme_f.read())
                print("*********************\n")
            except Exception:
                # best-effort: README display failures are not fatal
                pass

        if (short_circuit is None):
            examine_abi(conf.download_path, content.name)
            #if os.path.exists("/var/lib/rpm"):
                #print("\nGenerating whatrequires\n")
                #pkg_scan.get_whatrequires(content.name, conf.yum_conf)

            write_out(conf.download_path + "/release", content.release + "\n")

            # record logcheck output
            #logcheck(conf.download_path)

            #if args.git:
                #print("\nTrying to guess the commit message\n")
                #commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
                #git.commit_to_git(conf, content.name, package.success)
        elif (short_circuit == "prep"):
            write_out(conf.download_path + "/release", content.release + "\n")
        #elif (short_circuit == "build"):
            # record logcheck output
            #logcheck(conf.download_path)
        #elif (short_circuit == "install"):
            ## record logcheck output
            #logcheck(conf.download_path)
        elif (short_circuit == "binary"):
            examine_abi(conf.download_path, content.name)
            #if os.path.exists("/var/lib/rpm"):
                #print("\nGenerating whatrequires\n")
                #pkg_scan.get_whatrequires(content.name, conf.yum_conf)

            #write_out(conf.download_path + "/release", content.release + "\n")

            #if args.git:
                #print("\nTrying to guess the commit message\n")
                #commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
                #git.commit_to_git(conf, content.name, package.success)
            #else:
                #print("To commit your changes, git add the relevant files and run 'git commit -F commitmsg'")
            link_new_rpms_here()
def parse_config_files(path, bump, filemanager, version):
    """Parse the various configuration files that may exist in the package directory.

    Reads the global autospec.conf (path in the module-global ``config_file``)
    and the per-package control files under ``path`` (ban/add lists, cached
    build requirements, configure/make/cmake argument files, prepend/append
    scriptlets, patch series, etc.), storing the results in this module's
    globals and on ``filemanager``/``tarball``/``buildreq``.

    path        -- package directory holding the per-package config files
    bump        -- when True, increment the release number read from "release"
    filemanager -- receives excludes/extras/dev_extras/setuid/attrs content
    version     -- current package version; gates use of buildreq_cache
    """
    # This function communicates almost entirely through module-level state;
    # declare every global it assigns.
    global extra_configure
    global extra_configure32
    global extra_configure64
    global extra_configure_avx2
    global extra_configure_avx512
    global config_files
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global os_packages
    global urlban
    global config_file
    global profile_payload
    global config_opts
    global extra_make
    global extra32_make
    global extra_make_install
    global extra_make32_install
    global extra_cmake
    global cmake_srcdir
    global subdir
    global install_macro
    global disable_static
    global prep_prepend
    global build_prepend
    global make_prepend
    global install_prepend
    global install_append
    global patches
    global autoreconf
    global yum_conf
    global custom_desc
    global failed_pattern_dir

    packages_file = None

    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)

        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)

        git_uri = config['autospec'].get('git', None)
        license_fetch = config['autospec'].get('license_fetch', None)
        license_show = config['autospec'].get('license_show', None)
        packages_file = config['autospec'].get('packages_file', None)
        yum_conf = config['autospec'].get('yum_conf', None)
        failed_pattern_dir = config['autospec'].get('failed_pattern_dir', None)

        # support reading the local files relative to config_file
        if packages_file and not os.path.isabs(packages_file):
            packages_file = os.path.join(os.path.dirname(config_file), packages_file)
        if yum_conf and not os.path.isabs(yum_conf):
            yum_conf = os.path.join(os.path.dirname(config_file), yum_conf)
        if failed_pattern_dir and not os.path.isabs(failed_pattern_dir):
            failed_pattern_dir = os.path.join(os.path.dirname(config_file), failed_pattern_dir)

        if not packages_file:
            print(
                "Warning: Set [autospec][packages_file] path to package list file for "
                "requires validation")
            # fall back to a "packages" file next to autospec.conf
            packages_file = os.path.join(os.path.dirname(config_file), "packages")

        urlban = config['autospec'].get('urlban', None)

    # Read values from options.conf (and deprecated files) and rewrite as necessary
    read_config_opts(path)

    # Warn (but continue) for any optional autospec.conf settings not present.
    if not git_uri:
        print(
            "Warning: Set [autospec][git] upstream template for remote git URI configuration"
        )
    if not license_fetch:
        print(
            "Warning: Set [autospec][license_fetch] uri for license fetch support"
        )
    if not license_show:
        print(
            "Warning: Set [autospec][license_show] uri for license link check support"
        )
    if not yum_conf:
        print(
            "Warning: Set [autospec][yum_conf] path to yum.conf file for whatrequires validation"
        )
        yum_conf = os.path.join(os.path.dirname(config_file), "image-creator/yum.conf")

    # Known OS package names, used elsewhere for requires validation.
    if packages_file:
        os_packages = set(read_conf_file(packages_file))
    else:
        os_packages = set(read_conf_file("~/packages"))

    # Wrapper used to emit the descriptions below as "#"-prefixed comments.
    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        """Write default configuration file with description to file name."""
        config_files.add(name)
        filename = os.path.join(path, name)
        if os.path.isfile(filename):
            # never clobber an existing per-package file
            return
        write_out(filename, wrapper.fill(description) + "\n")

    # Seed the package directory with empty, documented control files so the
    # packager knows what can be tweaked.
    write_default_conf_file(
        "buildreq_ban",
        "This file contains build requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "pkgconfig_ban",
        "This file contains pkgconfig build requirements that get picked up but"
        " are undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "requires_ban",
        "This file contains runtime requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "buildreq_add",
        "This file contains additional build requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "pkgconfig_add",
        "This file contains additional pkgconfig build requirements that did "
        "not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "requires_add",
        "This file contains additional runtime requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "excludes",
        "This file contains the output files that need %exclude. Full path "
        "names, one per line.")

    # Release number: read from "release", optionally bumped by one.
    content = read_conf_file(os.path.join(path, "release"))
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release :", tarball.release)

    content = read_conf_file(os.path.join(path, "buildreq_ban"))
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file(os.path.join(path, "pkgconfig_ban"))
    for banned in content:
        # pkgconfig bans are stored in the same set, wrapped as pkgconfig(...)
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file(os.path.join(path, "requires_ban"))
    for banned in content:
        print("Banning runtime requirement: %s." % banned)
        buildreq.banned_requires.add(banned)

    content = read_conf_file(os.path.join(path, "buildreq_add"))
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    # Cached build requirements are only trusted when the first line matches
    # the current package version.
    content = read_conf_file(os.path.join(path, "buildreq_cache"))
    if content and content[0] == version:
        for extra in content[1:]:
            print("Adding additional build (cache) requirement: %s." % extra)
            buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "pkgconfig_add"))
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "requires_add"))
    for extra in content:
        print("Adding additional runtime requirement: %s." % extra)
        buildreq.add_requires(extra, override=True)

    content = read_conf_file(os.path.join(path, "excludes"))
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    filemanager.excludes += content

    content = read_conf_file(os.path.join(path, "extras"))
    for extra in content:
        print("extras for : %s." % extra)
    filemanager.extras += content

    content = read_conf_file(os.path.join(path, "dev_extras"))
    for extra in content:
        print("dev for : %s." % extra)
    filemanager.dev_extras += content

    content = read_conf_file(os.path.join(path, "setuid"))
    for suid in content:
        print("setuid for : %s." % suid)
    filemanager.setuid += content

    content = read_conf_file(os.path.join(path, "attrs"))
    for line in content:
        # Split e.g. "%attr(mode, user, group) /path" on parens/commas; the
        # last token is the file path, the rest are the attr arguments.
        # NOTE(review): assumes exactly this layout -- confirm with callers.
        attr = re.split(r'\(|\)|,', line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        filemanager.attrs[filename] = attr

    # Patch series: if any patch touches autotools inputs, a reconf is needed.
    patches += read_conf_file(os.path.join(path, "series"))
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches]
    cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am)|(configure.ac|configure.in))\" %s" % " ".join(
        pfiles)  # noqa: W605
    if patches and call(
            cmd,
            check=False,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL) == 0:
        autoreconf = True

    # Any CVE-named patch marks the package security sensitive.
    if any(p.lower().startswith('cve-') for p in patches):
        config_opts['security_sensitive'] = True
        rewrite_config_opts(path)

    # Extra arguments for the various configure flavors, one per line,
    # joined as a line-continued string for the specfile.
    content = read_conf_file(os.path.join(path, "configure"))
    extra_configure = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure32"))
    extra_configure32 = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure64"))
    extra_configure64 = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure_avx2"))
    extra_configure_avx2 = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure_avx512"))
    extra_configure_avx512 = " \\\n".join(content)

    if config_opts["keepstatic"]:
        # empty string disables the configure flag that strips static libs
        disable_static = ""
    if config_opts['broken_parallel_build']:
        parallel_build = ""

    # Extra make/install/cmake arguments.
    content = read_conf_file(os.path.join(path, "make_args"))
    if content:
        extra_make = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make32_args"))
    if content:
        extra32_make = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make_install_args"))
    if content:
        extra_make_install = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make32_install_args"))
    if content:
        extra_make32_install = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "install_macro"))
    if content and content[0]:
        install_macro = content[0]

    content = read_conf_file(os.path.join(path, "cmake_args"))
    if content:
        extra_cmake = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "cmake_srcdir"))
    if content and content[0]:
        cmake_srcdir = content[0]

    content = read_conf_file(os.path.join(path, "subdir"))
    if content and content[0]:
        subdir = content[0]

    # An explicit build_pattern overrides detection (priority 20) and
    # disables the patch-driven autoreconf decision made above.
    content = read_conf_file(os.path.join(path, "build_pattern"))
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        autoreconf = False

    content = read_conf_file(os.path.join(path, "make_check_command"))
    if content:
        check.tests_config = '\n'.join(content)

    # Manually curated licenses from <name>.license; words containing ":"
    # are skipped (treated as URLs/labels rather than license identifiers).
    content = read_conf_file(os.path.join(path, tarball.name + ".license"))
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    content = read_conf_file(os.path.join(path, "golang_libpath"))
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath : {}".format(tarball.golibpath))

    if config_opts['use_clang']:
        # clang and -funroll-loops do not mix here
        config_opts['funroll-loops'] = False
        buildreq.add_buildreq("llvm")

    if config_opts['32bit']:
        buildreq.add_buildreq("glibc-libc32")
        buildreq.add_buildreq("glibc-dev32")
        buildreq.add_buildreq("gcc-dev32")
        buildreq.add_buildreq("gcc-libgcc32")
        buildreq.add_buildreq("gcc-libstdc++32")

    prep_prepend = read_conf_file(os.path.join(path, "prep_prepend"))
    # Migrate deprecated file names to their current equivalents.
    if os.path.isfile(os.path.join(path, "prep_append")):
        os.rename(os.path.join(path, "prep_append"), os.path.join(path, "build_prepend"))
    make_prepend = read_conf_file(os.path.join(path, "make_prepend"))
    build_prepend = read_conf_file(os.path.join(path, "build_prepend"))
    install_prepend = read_conf_file(os.path.join(path, "install_prepend"))
    if os.path.isfile(os.path.join(path, "make_install_append")):
        os.rename(os.path.join(path, "make_install_append"), os.path.join(path, "install_append"))
    install_append = read_conf_file(os.path.join(path, "install_append"))
    profile_payload = read_conf_file(os.path.join(path, "profile_payload"))

    custom_desc = read_conf_file(os.path.join(path, "description"))
def main():
    """Command-line entry point for autospec.

    Parses arguments, downloads and statically analyzes the source tarball
    (licenses, description, build requirements, tests), writes an initial
    specfile, then repeatedly builds the package -- learning new information
    each round -- until the build stabilizes or the round limit is hit.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git",
                        action="store_false", dest="git", default=True,
                        help="Don't commit result to git")
    parser.add_argument("-n", "--name", nargs=1,
                        action="store", dest="name", default="",
                        help="Override the package name")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                             " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store",
                        dest="archives", default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                             " a location for the sources to be extracted to (e.g."
                             " http://example.com/downloads/dependency.tar.gz"
                             " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only",
                        action="store_true", dest="license_only",
                        default=False, help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Attempt to download and verify package signature")
    args = parser.parse_args()
    # -a/--archives arguments come in (url, extraction-destination) pairs
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    build.setup_patterns()

    tarball.name_and_version(args.url, args.name)
    tarball.download_tarball(args.url, args.name, args.archives, args.target)
    _dir = tarball.path
    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                      tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            # Best effort: a missing or unreadable .license file is not fatal.
            pass
        # NOTE(review): `name` is not defined in this function; it appears to
        # rely on a module-level global (or should be tarball.name) -- confirm.
        license.scan_for_licenses(name, _dir)
        sys.exit(0)

    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(name, _dir, build.download_path)
    specdescription.scan_for_description(name, _dir)
    license.scan_for_licenses(name, _dir)
    docs.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    load_specfile(specfile)
    specfile.write_spec(build.download_path)

    print("\n")
    if args.integrity:
        pkg_integrity.check(args.url, build.download_path)

    # Rebuild, folding discovered files/locales back into the spec each
    # round, until nothing forces a restart (or we give up after 20 rounds).
    while True:
        build.package()
        specfile.packages = files.packages
        specfile.locales = lang.locales
        specfile.write_spec(build.download_path)
        files.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print("Build failed")
        return
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            # Informational only; ignore problems reading the README.
            pass

    examine_abi(build.download_path)
    with open(build.download_path + "/release", "w") as fp:
        fp.write(tarball.release + "\n")

    commitmessage.guess_commit_message()
    if args.git:
        git.commit_to_git(build.download_path)