def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    conf = config.Config()
    check_requirements(args.git)
    package = build.Build(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(package.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        try:
            with open(os.path.join(package.download_path,
                                   content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(package.download_path, args.bump, filemanager,
                            content, requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(package.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name,
                                    package.download_path, conf)
    specdescription.scan_for_description(content.name, _dir,
                                         conf.license_translations,
                                         conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(package.download_path, _dir,
                                   conf.transforms)
    add_sources(package.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, package.download_path, conf,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(package.download_path)
    while 1:
        package.package(filemanager, args.mock_config, args.mock_opts, conf,
                        requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(package.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(package.uniqueext,
                                               content.name,
                                               content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1

        if package.round > 20 or package.must_restart == 0:
            break

        save_mock_logs(package.download_path, package.round)

    check.check_regression(package.download_path,
                           conf.config_opts['skip_tests'])

    if package.success == 0:
        conf.create_buildreq_cache(package.download_path, content.version,
                                   requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(package.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(package.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(package.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content,
                                       package)
    conf.create_buildreq_cache(package.download_path, content.version,
                               requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(package.download_path, conf, content.name,
                          package.success)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(url, name, args.version, args.target, archives,
                    filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    if args.prep_only:
        write_prep(workingdir)
        exit(0)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager, args.mock_config, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext,
                                               tarball.name,
                                               tarball.version,
                                               tarball.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1

        if build.round > 20 or build.must_restart == 0:
            break

        save_build_log(build.download_path, build.round)

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED)

    if args.git:
        git.commit_to_git(build.download_path)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def main():
    """Main function for autospec."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false",
                        dest="git", default=True,
                        help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                             " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                             " a location for the sources to be extracted to"
                             " (e.g."
                             " http://example.com/downloads/dependency.tar.gz"
                             " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Search for package signature from source URL"
                             " and attempt to verify package")
    parser.add_argument("--non_interactive", action="store_true",
                        default=False,
                        help="Disable interactive mode for package"
                             " verification")
    args = parser.parse_args()

    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(args.url, args.name, args.version, args.target,
                    args.archives, filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(args.url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)
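# Assumed module entry point (not part of the section above): conventional
# guard so the file can be executed directly as a script, calling main().
if __name__ == '__main__':
    main()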