def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false",
                        dest="git", default=True,
                        help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("url", help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store",
                        dest="archives", default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="common/autospec.conf",
                        help="Set configuration file to use")
    args = parser.parse_args()
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    build.setup_patterns()
    tarball.download_tarball(args.url, args.name, args.archives)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if word.find(":") < 0:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(tarball.name, _dir)
        exit(0)

    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump)

    buildreq.scan_for_configure(tarball.name, _dir, build.download_path)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(tarball.name, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    write_spec(build.download_path + "/" + tarball.name + ".spec")
    print("\n")

    while 1:
        build.package()
        write_spec(build.download_path + "/" + tarball.name + ".spec")
        files.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print("Build failed")
        return

    with open(build.download_path + "/release", "w") as fp:
        fp.write(tarball.release + "\n")

    if args.git:
        git.commit_to_git(build.download_path)
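# Example invocation of the CLI defined above (illustrative; the script name,
# URLs, and paths are placeholders, only the flags come from the parser):
#
#     python autospec.py -n mypackage -c common/autospec.conf \
#         -a http://example.com/downloads/dependency.tar.gz /deps \
#         http://example.com/downloads/mytar.tar.gz
#
# Each -a/--archives entry is a pair: an extra source URL followed by the
# directory, relative to the extract root, it should be unpacked into; that
# pairing is why the argument count must be even.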
def package(args, url, name, archives, workingdir, infile_dict):
    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(url, name, args.version, args.target, archives,
                    filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    if args.prep_only:
        write_prep(workingdir)
        exit(0)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)
    print("\n")

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager, args.mock_config, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext,
                                               tarball.name,
                                               tarball.version,
                                               tarball.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1
        if build.round > 20 or build.must_restart == 0:
            break
        save_build_log(build.download_path, build.round)

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED)

    if args.git:
        git.commit_to_git(build.download_path)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    conf = config.Config()
    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(build.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(build.download_path, args.bump, filemanager,
                            content, requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(build.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name, build.download_path,
                                    conf)
    specdescription.scan_for_description(content.name, _dir,
                                         conf.license_translations,
                                         conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(build.download_path, _dir, conf.transforms)
    add_sources(build.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements, content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)
    print("\n")

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path, conf,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager, args.mock_config, args.mock_opts, conf,
                      requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext,
                                               content.name,
                                               content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1
        if build.round > 20 or build.must_restart == 0:
            break
        save_mock_logs(build.download_path, build.round)

    check.check_regression(build.download_path,
                           conf.config_opts['skip_tests'])

    if build.success == 0:
        conf.create_buildreq_cache(build.download_path, content.version,
                                   requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(build.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
    conf.create_buildreq_cache(build.download_path, content.version,
                               requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(build.download_path, conf, content.name)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
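# A minimal driver sketch for the package() entry point above. It assumes the
# caller has already parsed the command line into an argparse namespace with
# the attributes package() reads (url, name, archives, version, target, config,
# bump, git, and so on); the wrapper name and the tempdir choice are
# illustrative, not taken from the original source.
def _run_package(args, infile_dict=None):
    import tempfile
    # Scratch working directory handed to build.setup_workingdir() inside
    # package(); a throwaway tempdir is one reasonable choice.
    workingdir = tempfile.mkdtemp()
    package(args, args.url, args.name, args.archives, workingdir, infile_dict)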
def main():
    """Main function for autospec."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false",
                        dest="git", default=True,
                        help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url", help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store",
                        dest="archives", default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument(
        "-i", "--integrity", action="store_true", default=False,
        help="Search for package signature from source URL and "
        "attempt to verify package")
    parser.add_argument(
        "--non_interactive", action="store_true", default=False,
        help="Disable interactive mode for package verification")
    args = parser.parse_args()
    if len(args.archives) % 2 != 0:
        parser.error(
            argparse.ArgumentTypeError(
                "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)
    # "workingdir" is assumed to be defined elsewhere in the module
    # (e.g. a scratch directory created before main() runs)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(args.url, args.name, args.version, args.target,
                    args.archives, filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    print("\n")

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(args.url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false",
                        dest="git", default=True,
                        help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("url", help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store",
                        dest="archives", default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Attempt to download and verify package signature")
    args = parser.parse_args()
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    build.setup_patterns()
    tarball.name_and_version(args.url, args.name)
    tarball.download_tarball(args.url, args.name, args.archives, args.target)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if word.find(":") < 0:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(tarball.name, _dir)
        exit(0)

    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(tarball.name, _dir, build.download_path)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(tarball.name, _dir)
    docs.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    load_specfile(specfile)
    specfile.write_spec(build.download_path)
    print("\n")

    if args.integrity:
        pkg_integrity.check(args.url, build.download_path)

    while 1:
        build.package()
        specfile.packages = files.packages
        specfile.locales = lang.locales
        specfile.write_spec(build.download_path)
        files.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print("Build failed")
        return
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)

    with open(build.download_path + "/release", "w") as fp:
        fp.write(tarball.release + "\n")

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)
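# Standard module entry-point guard, assuming this file is executed directly
# as the autospec command; the guard itself is illustrative and not part of
# the excerpt above.
if __name__ == "__main__":
    main()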