def test_guess_commit_message_cve_config(self):
    """
    Test guess_commit_message() with mocked internals.

    Commit-message lines and CVEs come from the newsfile mock, while an
    additional CVE is present in config; that config CVE changes the
    first line of the commit message to a "Fix for CVE-..." title.
    """
    saved_process_news = commitmessage.process_NEWS

    def fake_process_news(newsfile):
        lines = ['', 'commit', 'message', 'with', 'cves', '']
        return (lines, {'cve1', 'cve2'})

    commitmessage.process_NEWS = fake_process_news
    commitmessage.config.cves = {'CVE-1234-5678'}
    # No old version recorded, which allows the CVE title to be used.
    commitmessage.config.old_version = None
    with mock.patch('util.open', create=True) as mock_open:
        mock_open.return_value = mock.MagicMock()
        commitmessage.guess_commit_message()

    # Restore the patched globals *before* asserting so a failed
    # assertion cannot cascade mock state into other tests.
    commitmessage.process_NEWS = saved_process_news
    commitmessage.config.cves = set()
    handle = mock_open.return_value.__enter__.return_value
    handle.write.assert_called_with(
        'testball: Fix for CVE-1234-5678\n\n\ncommit\nmessage\nwith\n'
        'cves\n\n\ncommit\nmessage\nwith\ncves\n\nCVEs fixed in this '
        'build:\nCVE-1234-5678\ncve1\ncve2\n\n')
def test_guess_commit_message_imported_key(self):
    """
    Test guess_commit_message() with mocked internals.

    Commit-message lines and CVEs come from the newsfile mock; a CVE in
    config changes the first line of the commit message, and imported
    key information is appended at the end of the message.
    """
    conf = config.Config()
    tcontent = tarball.Content("", "testball", "0.0.1", [], conf)
    conf.content = tcontent
    saved_process_news = commitmessage.process_NEWS

    def fake_process_news(newsfile, old_version, name, version):
        lines = ['', 'commit', 'message', 'with', 'cves', '']
        return (lines, {'cve1', 'cve2'})

    commitmessage.process_NEWS = fake_process_news
    conf.cves = {'CVE-1234-5678'}
    # No old version recorded, which allows the CVE title to be used.
    conf.old_version = None
    with mock.patch('util.open_auto', create=True) as mock_open:
        mock_open.return_value = mock.MagicMock()
        commitmessage.guess_commit_message("keyinfo content", conf, tcontent)

    # Restore the patched function *before* asserting so a failed
    # assertion cannot cascade mock state into other tests.
    commitmessage.process_NEWS = saved_process_news
    handle = mock_open.return_value.__enter__.return_value
    handle.write.assert_called_with(
        'testball: Fix for CVE-1234-5678\n\n\ncommit\nmessage\nwith\n'
        'cves\n\n\ncommit\nmessage\nwith\ncves\n\nCVEs fixed in this '
        'build:\nCVE-1234-5678\ncve1\ncve2\n\nKey imported:\nkeyinfo '
        'content\n')
def test_guess_commit_message(self):
    """
    Test guess_commit_message() with mocked internals.

    Both commit-message lines and CVEs are supplied by the mocked
    newsfile processing; the default version-update title is expected.
    """
    saved_process_news = commitmessage.process_NEWS

    def fake_process_news(newsfile):
        lines = ['', 'commit', 'message', 'with', 'cves', '']
        return (lines, {'cve1', 'cve2'})

    commitmessage.process_NEWS = fake_process_news
    with mock.patch('util.open', create=True) as mock_open:
        mock_open.return_value = mock.MagicMock()
        commitmessage.guess_commit_message()

    # Restore the patched function *before* asserting so a failed
    # assertion cannot cascade mock state into other tests.
    commitmessage.process_NEWS = saved_process_news
    handle = mock_open.return_value.__enter__.return_value
    handle.write.assert_called_with(
        'testball: Autospec creation for update from version 0.0.0 to '
        'version 0.0.1\n\n\ncommit\nmessage\nwith\ncves\n\n\ncommit\n'
        'message\nwith\ncves\n\nCVEs fixed in this build:\ncve1\ncve2'
        '\n\n')
def test_guess_commit_message(self):
    """
    Test guess_commit_message() with mocked internals.

    Both commit-message lines and CVEs are supplied by the mocked
    newsfile processing; the default version-update title is expected.
    """
    conf = config.Config()
    conf.old_version = "0.0.0"
    tcontent = tarball.Content("", "testball", "0.0.1", [], conf)
    conf.content = tcontent
    saved_process_news = commitmessage.process_NEWS

    def fake_process_news(newsfile, old_version, name, version):
        lines = ['', 'commit', 'message', 'with', 'cves', '']
        return (lines, {'cve1', 'cve2'})

    commitmessage.process_NEWS = fake_process_news
    commitmessage.build.download_path = ""
    with mock.patch('util.open_auto', create=True) as mock_open:
        mock_open.return_value = mock.MagicMock()
        commitmessage.guess_commit_message("", conf, tcontent)

    # Restore the patched function *before* asserting so a failed
    # assertion cannot cascade mock state into other tests.
    commitmessage.process_NEWS = saved_process_news
    handle = mock_open.return_value.__enter__.return_value
    handle.write.assert_called_with(
        'testball: Autospec creation for update from version 0.0.0 to '
        'version 0.0.1\n\n\ncommit\nmessage\nwith\ncves\n\n\ncommit\n'
        'message\nwith\ncves\n\nCVEs fixed in this build:\ncve1\ncve2'
        '\n\n')
def package(args, url, name, archives, workingdir, infile_dict):
    """Download, analyze, build and commit a package (legacy module API).

    Args:
        args: parsed argparse namespace (git, version, target, bump, ...).
        url: source tarball URL.
        name: package name override ("" to derive from the URL).
        archives: flat list of alternating archive-URL / extract-dir pairs.
        workingdir: directory to set up the build working tree in.
        infile_dict: parsed infile values used to override the specfile.

    Exits the process via exit(0) for --license-only / --prep-only runs,
    or sys.exit(1) when the build ultimately fails.
    """
    check_requirements(args.git)
    build.setup_workingdir(workingdir)
    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(url, name, args.version, args.target, archives,
                    filemanager)
    _dir = tarball.path

    if args.license_only:
        # Best-effort read of an existing .license file; a missing or
        # unreadable file is not an error for this mode.
        try:
            with open(
                    os.path.join(build.download_path,
                                 tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            # Was a bare "except:"; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    if args.prep_only:
        write_prep(workingdir)
        exit(0)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)
    print("\n")

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager, args.mock_config, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext,
                                               tarball.name,
                                               tarball.version,
                                               tarball.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1
        if build.round > 20 or build.must_restart == 0:
            break
        save_build_log(build.download_path, build.round)

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        # Purely informational; never let a display problem kill the run.
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            # Was a bare "except:"; narrowed for the same reason as above.
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED)

    if args.git:
        git.commit_to_git(build.download_path)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec.

    Downloads and statically analyzes the source tarball, writes an
    initial specfile, then iteratively builds (up to 20 rounds) while
    learning from build results, and finally records release/commit
    metadata.  Exits via exit(0) for --license-only / --prep-only runs,
    or sys.exit(1) when the build ultimately fails.
    """
    conf = config.Config()
    check_requirements(args.git)
    package = build.Build(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(package.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        # Best-effort: seed licenses from an existing .license file, then
        # scan; a missing/unreadable file is deliberately ignored.
        try:
            with open(
                    os.path.join(package.download_path,
                                 content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(package.download_path, args.bump, filemanager,
                            content, requirements)
    # Second setup_patterns call loads the per-package failed-pattern
    # directory discovered while parsing the config files.
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(package.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name,
                                    package.download_path, conf)
    specdescription.scan_for_description(content.name, _dir,
                                         conf.license_translations,
                                         conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(package.download_path, _dir,
                                   conf.transforms)
    add_sources(package.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)
    print("\n")

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, package.download_path, conf,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(package.download_path)
    # Build loop: rebuild until a round succeeds with nothing learned
    # (must_restart == 0) or we give up after 20 rounds.
    while 1:
        package.package(filemanager, args.mock_config, args.mock_opts, conf,
                        requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(package.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(package.uniqueext,
                                               content.name,
                                               content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
        if package.round > 20 or package.must_restart == 0:
            break
        save_mock_logs(package.download_path, package.round)

    check.check_regression(package.download_path,
                           conf.config_opts['skip_tests'])

    if package.success == 0:
        # Cache buildreqs even on failure so the next attempt starts warm.
        conf.create_buildreq_cache(package.download_path, content.version,
                                   requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        # Informational only; never let a display problem kill the run.
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(package.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(package.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(package.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content,
                                       package)
    conf.create_buildreq_cache(package.download_path, content.version,
                               requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(package.download_path, conf, content.name,
                          package.success)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def main():
    """Main function for autospec.

    Parses the command line, then downloads, analyzes, iteratively builds
    and finally git-commits the package.  Exits via exit(0) after a
    --license-only run, or sys.exit(1) when the build ultimately fails.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument(
        "-i", "--integrity", action="store_true", default=False,
        help="Search for package signature from source URL and "
        "attempt to verify package")
    parser.add_argument(
        "--non_interactive", action="store_true", default=False,
        help="Disable interactive mode for package verification")
    args = parser.parse_args()

    # Archives come in (url, extract-dir) pairs, so the flat list must
    # have even length.
    if len(args.archives) % 2 != 0:
        parser.error(
            argparse.ArgumentTypeError(
                "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)
    # NOTE(review): `workingdir` is not defined in this function — it is
    # presumably a module-level global; confirm it is set before main() runs.
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(args.url, args.name, args.version, args.target,
                    args.archives, filemanager)
    _dir = tarball.path

    if args.license_only:
        # Best-effort read of an existing .license file; a missing or
        # unreadable file is not an error for this mode.
        try:
            with open(
                    os.path.join(build.download_path,
                                 tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            # Was a bare "except:"; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(args.url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    # Build loop: rebuild until a round succeeds with nothing learned
    # (must_restart == 0) or we give up after 20 rounds.
    while 1:
        build.package(filemanager)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        # Informational only; never let a display problem kill the run.
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            # Was a bare "except:"; narrowed for the same reason as above.
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)
def main():
    """Main function for autospec (early revision).

    Parses the command line, then downloads, analyzes, iteratively builds
    and finally git-commits the package.  Exits via exit(0) after a
    --license-only run, or returns early when the build fails.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    # NOTE(review): nargs=1 makes args.name a *list* of one string, unlike
    # the other string options — confirm tarball.name_and_version expects
    # that before changing it.
    parser.add_argument("-n", "--name", nargs=1, action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Attempt to download and verify package"
                        " signature")
    args = parser.parse_args()

    # Archives come in (url, extract-dir) pairs, so the flat list must
    # have even length.
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    build.setup_patterns()
    tarball.name_and_version(args.url, args.name)
    tarball.download_tarball(args.url, args.name, args.archives, args.target)
    _dir = tarball.path

    if args.license_only:
        # Best-effort read of an existing .license file; a missing or
        # unreadable file is not an error for this mode.
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"),
                      "r") as dotlic:
                for word in dotlic.read().split():
                    if word.find(":") < 0:
                        license.add_license(word)
        except Exception:
            # Was a bare "except:"; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            pass
        # Fixed: the original passed an undefined local `name` here (and
        # below), which raised NameError; tarball.name is set by
        # tarball.name_and_version() above.
        license.scan_for_licenses(tarball.name, _dir)
        exit(0)

    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(tarball.name, _dir, build.download_path)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(tarball.name, _dir)
    docs.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    load_specfile(specfile)
    specfile.write_spec(build.download_path)

    print("\n")
    if args.integrity:  # idiom fix: was "== True"
        pkg_integrity.check(args.url, build.download_path)

    # Build loop: rebuild until a round succeeds with nothing learned
    # (must_restart == 0) or we give up after 20 rounds.
    while 1:
        build.package()
        specfile.packages = files.packages
        specfile.locales = lang.locales
        specfile.write_spec(build.download_path)
        files.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print("Build failed")
        return
    elif os.path.isfile("README.clear"):
        # Informational only; never let a display problem kill the run.
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            # Was a bare "except:"; narrowed for the same reason as above.
            pass

    examine_abi(build.download_path)

    with open(build.download_path + "/release", "w") as fp:
        fp.write(tarball.release + "\n")

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)