def generator(self):
    """Test template."""
    # `state`, `name`, `version`, and `url` are supplied by the enclosing
    # test factory's closure (compare the later variant of this template,
    # which loops over `state` itself).
    conf = config.Config()
    conf.parse_config_versions = Mock(return_value={})
    name_arg = ""
    version_arg = ""
    if state == 1 or state == 3:
        name_arg = f"state.{name}"
    if state == 2 or state == 3:
        version_arg = f"state.{version}"
    content = tarball.Content(url, name_arg, version_arg, [], conf)
    content.config = conf
    pkg = build.Build('/tmp')
    pkg.download_path = '/download/path/'
    mgr = files.FileManager(conf, pkg)
    content.name_and_version(mgr)
    name_cmp = name
    version_cmp = version
    if state == 1 or state == 3:
        name_cmp = name_arg
    if state == 2 or state == 3:
        version_cmp = version_arg
    self.assertEqual(name_cmp, content.name)
    self.assertEqual(version_cmp, content.version)
    # redo without args and verify giturl is set correctly
    content.name = ""
    content.version = ""
    content.name_and_version(Mock())
    if "github.com" in url:
        self.assertRegex(content.giturl,
                         r"https://github.com/[^/]+/" + content.repo + ".git")
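# A minimal, self-contained sketch (an assumption, not from the original
# source) of how a closure-based template like `generator` above is usually
# attached to a unittest.TestCase: a factory captures url/name/version/state,
# and setattr() registers one test method per combination. The TestCase name,
# factory, and sample URL below are illustrative only.
import unittest


def make_generator(url, name, version, state):
    def generator(self):
        # Stand-in for the real template body; it closes over the factory
        # arguments exactly as `generator` above does.
        self.assertTrue(url.startswith("http"))
        self.assertIn(state, range(4))
    return generator


class TestNameVersion(unittest.TestCase):
    pass


for i, case in enumerate([("https://github.com/example/pkg/archive/v1.tar.gz",
                           "pkg", "1", 0)]):
    setattr(TestNameVersion, f"test_case_{i}", make_generator(*case))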
def test_parse_build_results_banned_files(self):
    """
    Test parse_build_results with a test log indicating banned files were
    found
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config()
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf)
    call_backup = build.util.call
    build.util.call = mock_util_call
    fm = files.FileManager(conf)
    open_name = 'build.util.open_auto'
    content = 'line 1\n' \
              'Installed (but unpackaged) file(s) found:\n' \
              '/opt/file\n' \
              '/usr/etc/file\n' \
              '/usr/local/file\n' \
              '/usr/src/file\n' \
              '/var/file\n' \
              'RPM build errors\n' \
              'errors here\n'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        build.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup
    self.assertEqual(fm.has_banned, True)
    # check no files were added
    self.assertEqual(build.must_restart, 0)
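# A standalone sketch of the mock_open/patch pattern these tests rely on:
# the patched target and the 'build.log' filename below are placeholder
# assumptions, but mock_open(read_data=...) and patch() are standard
# unittest.mock usage.
from unittest.mock import mock_open, patch

fake_log = 'line 1\nwhich: no qmake\nexiting'
m_open = mock_open(read_data=fake_log)
with patch('builtins.open', m_open):
    # Inside this block, any open() call returns a file-like object whose
    # read()/readlines() serve fake_log instead of touching the disk.
    with open('build.log') as f:
        assert f.read() == fake_log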
def test_parse_build_results_pkgconfig(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing qmake package (pkgconfig error)
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    conf.config_opts['32bit'] = True
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    open_name = 'build.util.open_auto'
    content = 'line 1\nwhich: no qmake\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup
    self.assertIn('pkgconfig(Qt)', reqs.buildreqs)
    self.assertIn('pkgconfig(32Qt)', reqs.buildreqs)
    self.assertEqual(pkg.must_restart, 1)
def test_parse_build_results_simple_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing httpd-dev package (simple pat error)
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    open_name = 'build.util.open_auto'
    content = 'line 1\nchecking for Apache test module support\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup
    self.assertIn('httpd-dev', reqs.buildreqs)
    self.assertEqual(pkg.must_restart, 1)
def test_parse_build_results_pkgconfig(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing qmake package (pkgconfig error)
    """
    def mock_util_call(cmd):
        del cmd

    build.config.setup_patterns()
    build.config.config_opts['32bit'] = True
    call_backup = build.util.call
    build.util.call = mock_util_call
    fm = files.FileManager()
    open_name = 'build.util.open_auto'
    content = 'line 1\nwhich: no qmake\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        build.parse_build_results('testname', 0, fm)

    build.util.call = call_backup
    self.assertIn('pkgconfig(Qt)', build.buildreq.buildreqs)
    self.assertIn('pkgconfig(32Qt)', build.buildreq.buildreqs)
    self.assertEqual(build.must_restart, 1)
def test_parse_build_results_failed_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing testpkg (failed pat error). The specific error is a python
    ImportError.
    """
    def mock_util_call(cmd):
        del cmd

    build.config.setup_patterns()
    call_backup = build.util.call
    build.util.call = mock_util_call
    fm = files.FileManager()
    open_name = 'build.util.open_auto'
    content = 'line 1\nImportError: No module named testpkg\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        build.parse_build_results('testname', 0, fm)

    build.util.call = call_backup
    self.assertIn('testpkg-python', build.buildreq.buildreqs)
    self.assertEqual(build.must_restart, 1)
def __init__(self, port=None):
    self.verbose = console.Option(False)
    self.strict = console.Option(False)
    self.autoTrace = console.Option(False)
    self.stretch = console.Option(100)
    self.console = console.Command()
    self.console.finishFunction = self.finish
    if port is None:
        self.nextPort = randomPort()
    else:
        self.nextPort = port
    self.eventManager = events.EventManager()
    self.fileManager = files.FileManager(printer=self.console)
    self.fileManager.purgeResponseFiles()
    self.requestManager = agents.RequestGenerator(self.eventManager,
                                                  self.fileManager,
                                                  self.console,
                                                  strict=self.strict,
                                                  verbose=self.verbose)
    self.servers = {}
    self.proxyProcess = None
    self.console.addOption("strict", self.strict,
                           "Apply strict testing of HTTP requests and responses")
    self.console.addOption("verbose", self.verbose, "Show details")
    self.console.addOption("stretch", self.stretch,
                           "Multiply all delays by factor = stretch / 100")
    self.console.addOption("autotrace", self.autoTrace,
                           "Trace every request for which check fails")
    self.console.addCommand("serve", self.doServe, "SID+", "Set up servers")
    self.console.addCommand("request", self.doRequest, "ID FILE SID",
                            "Initiate request named ID for FILE from server SID")
    self.console.addCommand("fetch", self.doFetch, "ID FILE SID",
                            "Fetch FILE from server SID using request named ID")
    self.console.addCommand("respond", self.doRespond, "ID+",
                            "Allow servers to return responses to requests")
    self.console.addCommand("delay", self.doDelay, "MS",
                            "Delay for MS milliseconds")
    self.console.addCommand("check", self.doCheck, "ID [CODE]",
                            "Make sure request ID handled properly and generated expected CODE")
    self.console.addCommand("generate", self.doGenerate, "FILE BYTES",
                            "Generate file (extension '.txt' or '.bin') with specified number of bytes")
    self.console.addCommand("delete", self.doDelete, "FILE+",
                            "Delete specified files")
    self.console.addCommand("proxy", self.doProxy, "[PATH] ARG*",
                            "(Re)start proxy server (pass arguments to proxy)")
    self.console.addCommand("trace", self.doTrace, "ID+",
                            "Trace histories of requests")
def test_parse_build_results_files(self):
    """
    Test parse_build_results with a test log indicating files are missing
    """
    def mock_util_call(cmd):
        del cmd

    build.config.setup_patterns()
    call_backup = build.util.call
    build.util.call = mock_util_call
    fm = files.FileManager()
    open_name = 'build.util.open_auto'
    content = 'line 1\n' \
              'Installed (but unpackaged) file(s) found:\n' \
              '/usr/testdir/file\n' \
              '/usr/testdir/file1\n' \
              '/usr/testdir/file2\n' \
              'RPM build errors\n' \
              'errors here\n'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        build.parse_build_results('testname', 0, fm)

    build.util.call = call_backup
    self.assertEqual(fm.files,
                     ['/usr/testdir/file', '/usr/testdir/file1',
                      '/usr/testdir/file2'])
    # one for each file added
    self.assertEqual(build.must_restart, 3)
def test_parse_build_results_failed_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing package.
    """
    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    open_auto_backup = build.util.open_auto
    build.util.call = MagicMock(return_value=None)
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    with open('tests/builderrors', 'r') as f:
        builderrors = f.readlines()
    for error in builderrors:
        if not error.startswith('#'):
            input, output = error.strip('\n').split('|')
            reqs.buildreqs = set()
            build.util.open_auto = mock_open(read_data=input)
            pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)
            self.assertIn(output, reqs.buildreqs)
            self.assertGreater(pkg.must_restart, 0)

    # Restoring functions
    build.util.call = call_backup
    build.util.open_auto = open_auto_backup
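# The tests/builderrors fixture consumed above is a plain-text file with one
# case per line in the form <log snippet>|<expected buildreq>; lines starting
# with '#' are skipped. The entries below are illustrative guesses at its
# shape, inferred from the parsing code, not copied from the real fixture:
#
#   # python import failures map to -python packages
#   ImportError: No module named testpkg|testpkg-python
#   which: no qmake|pkgconfig(Qt)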
def test_parse_build_results_patch(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    backport patch no longer applying
    """
    def mock_util_call(cmd):
        del cmd

    def mock_conf_remove_backport_patch(patch):
        del patch
        return 1

    conf = config.Config('')
    conf.setup_patterns()
    conf.remove_backport_patch = mock_conf_remove_backport_patch
    conf.patches = ['backport-test.patch']
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    open_name = 'build.util.open_auto'
    content = 'line 1\nPatch #1 (backport-test.patch):\nSkipping patch.'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup
    self.assertEqual(pkg.must_restart, 1)
def __init__(self):
    self.verbose = console.Option(False)
    self.strict = console.Option(0)
    self.autoTrace = console.Option(False)
    self.stretch = console.Option(100)
    self.timeout = console.Option(5000)
    self.checkTiming = console.Option(False)
    self.checkUnsafe = console.Option(False)
    self.checkLocking = console.Option(False)
    self.checkSemaphore = console.Option(False)
    self.linefeedPercent = console.Option(5)
    self.console = console.Command()
    self.console.finishFunction = self.finish
    self.host = socket.gethostname()
    self.eventManager = events.EventManager()
    self.fileManager = files.FileManager(printer=self.console,
                                         fileGenerator=findFileGenerator())
    self.fileManager.purgeResponseFiles()
    self.requestManager = agents.RequestGenerator(self.eventManager,
                                                  self.fileManager,
                                                  self.console,
                                                  strict=self.strict,
                                                  verbose=self.verbose)
    self.portManager = agents.PortFinder(self.console)
    self.servers = {}
    self.monitors = []
    self.haveProxy = False
    self.proxyProcess = None
    self.activeEvents = {}
    self.getId = 0
    self.console.addOption("strict", self.strict,
                           "Set level of strictness on HTTP message formatting (0-4)")
    self.console.addOption("timing", self.checkTiming,
                           "Insert random delays into synchronization operations")
    # Bind "unsafe" to checkUnsafe (the original bound checkLocking twice,
    # leaving checkUnsafe unused).
    self.console.addOption("unsafe", self.checkUnsafe,
                           "Check for thread-unsafe functions")
    self.console.addOption("locking", self.checkLocking,
                           "Check proper use of locks")
    self.console.addOption("semaphore", self.checkSemaphore,
                           "Disallow use of semaphores")
    self.console.addOption("verbose", self.verbose, "Show details")
    self.console.addOption("stretch", self.stretch,
                           "Multiply all delays by factor = stretch / 100")
    self.console.addOption("timeout", self.timeout,
                           "Set default timeout for wait (in milliseconds)")
    self.console.addOption("autotrace", self.autoTrace,
                           "Trace every request for which check fails")
    self.console.addOption("linefeed", self.linefeedPercent,
                           "Frequency of line feeds in binary files (percent)")
    self.console.addCommand("serve", self.doServe, "SID+",
                            "Set up servers. (Server with SID starting with '-' is disabled.)")
    self.console.addCommand("request", self.doRequest, "ID FILE SID",
                            "Initiate request named ID for FILE from server SID")
    self.console.addCommand("post-request", self.doPostRequest, "ID FILE SID",
                            "Initiate request named ID for FILE from server SID")
    self.console.addCommand("fetch", self.doFetch, "ID FILE SID",
                            "Fetch FILE from server SID using request named ID")
    self.console.addCommand("respond", self.doRespond, "ID+",
                            "Allow servers to return responses to requests")
    self.console.addCommand("get", self.doGet, "URL",
                            "Retrieve web object with and without proxy and compare the two")
    self.console.addCommand("delay", self.doDelay, "MS",
                            "Delay for MS milliseconds")
    self.console.addCommand("check", self.doCheck, "ID [CODE]",
                            "Make sure request ID handled properly and generated expected CODE")
    self.console.addCommand("generate", self.doGenerate, "FILE BYTES",
                            "Generate file (extension '.txt' or '.bin') with specified number of bytes")
    self.console.addCommand("delete", self.doDelete, "FILE+",
                            "Delete specified files")
    self.console.addCommand("proxy", self.doProxy, "[PATH] ARG*",
                            "(Re)start proxy server (pass arguments to proxy)")
    self.console.addCommand("external", self.doExternalProxy, "HOST:PORT",
                            "Use external proxy")
    self.console.addCommand("trace", self.doTrace, "ID+",
                            "Trace histories of requests")
    self.console.addCommand("signal", self.doSignal, "[SIGNO]",
                            "Send signal number SIGNO to process. Default = 13 (SIGPIPE)")
    self.console.addCommand("disrupt", self.doDisrupt, "(request|response) [SID]",
                            "Schedule disruption of request or response by client [or server SID]")
    self.console.addCommand("wait", self.doWait, "* | ID+",
                            "Wait until all or listed pending requests, fetches, and responses have completed")
def test_parse_build_results_files(self):
    """
    Test parse_build_results with a test log indicating files are missing
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    open_name = 'build.util.open_auto'
    content = 'line 1\n' \
              'Installed (but unpackaged) file(s) found:\n' \
              '/usr/testdir/file\n' \
              '/usr/testdir/file1\n' \
              '/usr/testdir/file2\n' \
              'RPM build errors\n' \
              'errors here\n'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup
    self.assertEqual(fm.files,
                     set(['/usr/testdir/file', '/usr/testdir/file1',
                          '/usr/testdir/file2']))
    # one for each file added
    self.assertEqual(pkg.must_restart, 0)
    self.assertEqual(pkg.file_restart, 3)
def generator(self):
    """Test template."""
    conf = config.Config('/download/path')
    conf.parse_config_versions = Mock(return_value={})
    # Test four different name/version states for tarball.Content, each in
    # a subtest. Test failures will print these state numbers for easy
    # identification:
    # 0 - no state
    # 1 - name only
    # 2 - version only
    # 3 - name and version
    for state in range(4):
        with self.subTest(state=state):
            name_arg = ""
            version_arg = ""
            if state == 1 or state == 3:
                name_arg = f"state.{name}"
            if state == 2 or state == 3:
                version_arg = f"state.{version}"
            content = tarball.Content(url, name_arg, version_arg, [], conf,
                                      '/tmp')
            content.config = conf
            pkg = build.Build()
            mgr = files.FileManager(conf, pkg)
            content.name_and_version(mgr)
            name_cmp = name
            version_cmp = version
            if state == 1 or state == 3:
                name_cmp = name_arg
            if state == 2 or state == 3:
                version_cmp = version_arg
            self.assertEqual(name_cmp, content.name)
            self.assertEqual(version_cmp, content.version)
            # redo without args and verify giturl is set correctly
            content.name = ""
            content.version = ""
            content.name_and_version(Mock())
            if "github.com" in url:
                self.assertRegex(
                    content.giturl,
                    r"https://github.com/[^/]+/" + content.repo + ".git")
def test_parse_build_results_simple_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing httpd-dev package (simple pat error)
    """
    def mock_util_call(cmd):
        del cmd

    build.config.setup_patterns()
    call_backup = build.util.call
    build.util.call = mock_util_call
    fm = files.FileManager()
    open_name = 'build.util.open_auto'
    content = 'line 1\nchecking for Apache test module support\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        build.parse_build_results('testname', 0, fm)

    build.util.call = call_backup
    self.assertIn('httpd-dev', build.buildreq.buildreqs)
    self.assertEqual(build.must_restart, 1)
def main():
    """
    Main function for autospec
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                        " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Search for package signature from source URL and "
                        "attempt to verify package")
    parser.add_argument("--non_interactive", action="store_true",
                        default=False,
                        help="Disable interactive mode for package verification")
    args = parser.parse_args()
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(args.url, args.name, args.version, args.target,
                    args.archives, filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(args.url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)
    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message()
    if args.git:
        git.commit_to_git(build.download_path)
def package(args, url, name, archives, workingdir, infile_dict):
    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(url, name, args.version, args.target, archives,
                    filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    if args.prep_only:
        write_prep(workingdir)
        exit(0)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while 1:
        build.package(filemanager, args.mock_config, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext, tarball.name,
                                               tarball.version,
                                               tarball.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1
        if build.round > 20 or build.must_restart == 0:
            break
        save_build_log(build.download_path, build.round)

    test.check_regression(build.download_path)
    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED)
    if args.git:
        git.commit_to_git(build.download_path)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    conf = config.Config()
    check_requirements(args.git)
    package = build.Build(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(package.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        try:
            with open(os.path.join(package.download_path,
                                   content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(package.download_path, args.bump, filemanager,
                            content, requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(package.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name,
                                    package.download_path, conf)
    specdescription.scan_for_description(content.name, _dir,
                                         conf.license_translations,
                                         conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(package.download_path, _dir,
                                   conf.transforms)
    add_sources(package.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, package.download_path, conf,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(package.download_path)
    while 1:
        package.package(filemanager, args.mock_config, args.mock_opts, conf,
                        requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(package.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(package.uniqueext,
                                               content.name, content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
        if package.round > 20 or package.must_restart == 0:
            break
        save_mock_logs(package.download_path, package.round)

    check.check_regression(package.download_path,
                           conf.config_opts['skip_tests'])
    if package.success == 0:
        conf.create_buildreq_cache(package.download_path, content.version,
                                   requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(package.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(package.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(package.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content,
                                       package)
    conf.create_buildreq_cache(package.download_path, content.version,
                               requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(package.download_path, conf, content.name,
                          package.success)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
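# The build drivers above all share the same fix-and-retry control flow:
# build, parse the log, apply fixes, and repeat until a round applies no
# fixes or a round cap is hit. A minimal abstract sketch of that loop
# (names here are illustrative, not autospec API):
def converge(build_once, max_rounds=20):
    """Run build_once() until it reports zero required restarts."""
    rounds = 0
    while True:
        must_restart = build_once()  # number of fixes applied this round
        rounds += 1
        if rounds > max_rounds or must_restart == 0:
            break
    return rounds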
def package(
    args,
    url,
    name,
    archives,
    archives_from_git,
    workingdir,
    download_from_git,
    branch,
    redownload_from_git,
    redownload_archive,
    force_module,
    force_fullclone,
    mock_dir,
    short_circuit,
    do_file_restart,
):
    """Entry point for building a package with autospec."""
    conf = config.Config(args.target)
    conf.parse_config_files_early()

    if util.debugging:
        print_debug(f"url 1: {url}")
    new_archives_from_git = []
    name_re_escaped = re.escape(name)

    # Download the source from git if necessary
    if download_from_git:
        giturl = url
        found_file = False
        fileslist = None
        download_file_full_path = ""
        if util.debugging:
            print_debug(f"url 2: {url}")
            print_debug(f"BRANCH 2: {branch}")
        # filename_re = re.compile(r"^{}{}".format(name, r"(-|-.)(\d+)(\.\d+)+\.tar\.gz"))
        filename_re = re.compile(r"^{}{}".format(name_re_escaped,
                                                 r"-.*\.tar\.gz"))
        if os.path.basename(os.getcwd()) == name:
            package_path = "./"
            if util.debugging:
                print_debug(f"package_path 11: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=os.path.getmtime)
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(
                        os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(f"found old package_path 21: {download_file_full_path}")
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(f"download_file_full_path 11: {download_file_full_path}")
                print_debug(f"giturl 11: {giturl}")
        else:
            package_path = f"packages/{name}"
            if util.debugging:
                print_debug(f"package_path 12: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=os.path.getmtime)
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(
                        os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(f"found old package_path 22: {download_file_full_path}")
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(f"download_file_full_path 12: {download_file_full_path}")
                print_debug(f"giturl 12: {giturl}")
    else:
        giturl = ""
        url = download.do_curl_get_effective_url(url)

    if archives_from_git:
        arch_url = []
        arch_destination = []
        arch_branch = []
        arch_submodule = []
        arch_forcefullclone = []
        if util.debugging:
            print_debug(f"ARCHIVES_GIT 2: {archives_from_git}")
            print_debug(f"archives in options.conf: {archives}\n\n")
        archives_re = re.compile(r"^file:\/\/")
        index_f = []

        for index, url_entry in enumerate(archives):
            if archives_re.search(url_entry):
                index_f.append(index)
        if util.debugging:
            for x in range(len(index_f) - 1, -1, -1):
                print_debug(f"rm {index_f[x]}:{archives[index_f[x]]} {index_f[x] + 1}:{archives[index_f[x] + 1]}")
        for x in sorted(range(len(index_f) - 1, -1, -1), reverse=True):
            del archives[index_f[x]:index_f[x] + 2]
        if util.debugging:
            print_debug(f"archives in options.conf: {archives}")

        for aurl, dest, br, sm, ffc in zip(archives_from_git[::5],
                                           archives_from_git[1::5],
                                           archives_from_git[2::5],
                                           archives_from_git[3::5],
                                           archives_from_git[4::5]):
            arch_url.append(aurl)
            arch_destination.append(dest)
            arch_branch.append(br)
            arch_submodule.append(sm)
            arch_forcefullclone.append(ffc)
            if util.debugging:
                print_debug(f"FOR ZIP {arch_url[-1]} - {arch_destination[-1]} - {arch_branch[-1]} - {arch_submodule[-1]} - {arch_forcefullclone[-1]}")

        for index, new_arch_url in enumerate(arch_url, start=0):
            found_file = False
            fileslist = []
            download_file_full_path = ""
            arch_name = os.path.splitext(os.path.basename(new_arch_url))[0]
            arch_name_re_escaped = re.escape(arch_name)
            filename_re = re.compile(r"^{}{}".format(arch_name_re_escaped,
                                                     r"-.*\.tar\.gz"))
            if util.debugging:
                print_debug(f"arch_name: {arch_name}")
            if os.path.basename(os.getcwd()) == name:
                package_path = "./"
                if util.debugging:
                    print_debug(f"archive package_path 1: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file():
                        if filename_re.search(filename.name):
                            found_file = True
                            download_file_full_path = "file://{}".format(
                                os.path.abspath(f"{package_path}{filename.name}"))
                            if util.debugging:
                                print_debug(f"filename: {filename.name}")
                                print_debug(f"Index: {index}")
                                print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                                print_debug(f"archive found 1: {arch_name} - {download_file_full_path}")
                            break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                        print_debug(f"Downloading archive 1: {arch_name} - {new_arch_url}")
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(arch_forcefullclone[index]),
                        conf=conf)
                if util.debugging:
                    print_debug(f"archive download_file_full_path 1: {download_file_full_path}")
                if download_file_full_path in archives or arch_destination[index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
            else:
                package_path = f"packages/{name}"
                if util.debugging:
                    print_debug(f"archive package_path 2: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file():
                        if filename_re.search(filename.name):
                            found_file = True
                            download_file_full_path = "file://{}".format(
                                os.path.abspath(f"{package_path}{filename.name}"))
                            if util.debugging:
                                print_debug(f"Index: {index}")
                                print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                                print_debug(f"archive found 2: {arch_name} - {download_file_full_path}")
                            break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                        print_debug(f"Downloading archive 2: {arch_name} - {new_arch_url}")
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(arch_forcefullclone[index]),
                        conf=conf)
                if util.debugging:
                    print_debug(f"archive download_file_full_path 2: {download_file_full_path}")
                if download_file_full_path in archives or arch_destination[index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
        if util.debugging:
            print_debug(f"new_archives_from_git: {new_archives_from_git}\n")

    #check_requirements(args.git)
    conf.detect_build_from_url(url)
    package = build.Build()

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package, mock_dir, short_circuit)

    if util.debugging:
        print_debug(f"url 4: {url}")
        print_debug(f"archives 4: {archives}")
        print_debug(f"new_archives_from_git 4: {new_archives_from_git}")

    content = tarball.Content(url, name, args.version, archives, conf,
                              workingdir, giturl, download_from_git, branch,
                              new_archives_from_git, force_module,
                              force_fullclone)
    content.process(filemanager)
    conf.create_versions(content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req(conf)
    conf.parse_config_files(args.bump, filemanager, content.version,
                            requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    if args.license_only:
        try:
            with open(
                    os.path.join(conf.download_path,
                                 content.name + ".license"),
                    "r",
            ) as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, name)
        exit(0)

    if short_circuit == "prep" or short_circuit is None:
        requirements.scan_for_configure(_dir, content.name, conf)
        specdescription.scan_for_description(content.name, _dir,
                                             conf.license_translations,
                                             conf.license_blacklist)
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
        commitmessage.scan_for_changes(conf.download_path, _dir,
                                       conf.transforms)
        conf.add_sources(archives, content)
        check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content, mock_dir, short_circuit)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, conf, interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    if short_circuit == "prep" or short_circuit is None:
        conf.create_buildreq_cache(content.version,
                                   requirements.buildreqs_cache)
        # conf.create_reqs_cache(content.version, requirements.reqs_cache)

    specfile.write_spec()
    filemanager.load_specfile_information(specfile, content)
    if short_circuit == "prep":
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/SRPMS/")
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/BUILD/")
    if short_circuit == "install":
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/RPMS/")

    while 1:
        package.package(
            filemanager,
            args.mock_config,
            args.mock_opts,
            conf,
            requirements,
            content,
            mock_dir,
            short_circuit,
            do_file_restart,
            args.cleanup,
        )
        if short_circuit != package.short_circuit:
            print_info(f"short_circuit: {short_circuit}")
            print_info(f"package.short_circuit: {package.short_circuit}")
            short_circuit = package.short_circuit
            print_info(f"new short_circuit: {short_circuit}")

        filemanager.load_specfile_information(specfile, content)
        filemanager.load_specfile(specfile)
        specfile.write_spec()
        filemanager.newfiles_printed = 0

        mock_chroot = f"{mock_dir}/clear-{package.uniqueext}/root/builddir/build/BUILDROOT/{content.name}-{content.version}-{content.release}.x86_64"
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
            print_info(f"filemanager.clean_directories({mock_chroot})")

        if do_file_restart:
            if package.round > 20 or (package.must_restart == 0
                                      and package.file_restart == 0):
                if short_circuit == "install":
                    print_info(f"short_circuit: {short_circuit}")
                    print_info(f"package.short_circuit: {package.short_circuit}")
                    short_circuit = "binary"
                    print_info(f"new short_circuit: {short_circuit}")
                    continue
                else:
                    break
        else:
            if package.round > 20 or package.must_restart == 0:
                break

        save_mock_logs(conf.download_path, package.round)

    #if short_circuit is None or short_circuit == "install":
        #check.check_regression(conf.download_path, conf.config_opts["skip_tests"])
        #conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
        #conf.create_reqs_cache(content.version, requirements.reqs_cache)

    if package.success == 0:
        #conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif package.success == 1:
        if os.path.isfile("README.clear"):
            try:
                print("\nREADME.clear CONTENTS")
                print("*********************")
                with open("README.clear", "r") as readme_f:
                    print(readme_f.read())
                print("*********************\n")
            except Exception:
                pass
        if short_circuit is None:
            examine_abi(conf.download_path, content.name)
            #if os.path.exists("/var/lib/rpm"):
                #print("\nGenerating whatrequires\n")
                #pkg_scan.get_whatrequires(content.name, conf.yum_conf)
            write_out(conf.download_path + "/release", content.release + "\n")
            # record logcheck output
            #logcheck(conf.download_path)
            #if args.git:
                #print("\nTrying to guess the commit message\n")
                #commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
                #git.commit_to_git(conf, content.name, package.success)
        elif short_circuit == "prep":
            write_out(conf.download_path + "/release", content.release + "\n")
        #elif (short_circuit == "build"):
            # record logcheck output
            #logcheck(conf.download_path)
        #elif (short_circuit == "install"):
            ## record logcheck output
            #logcheck(conf.download_path)
        elif short_circuit == "binary":
            examine_abi(conf.download_path, content.name)
            #if os.path.exists("/var/lib/rpm"):
                #print("\nGenerating whatrequires\n")
                #pkg_scan.get_whatrequires(content.name, conf.yum_conf)
            #write_out(conf.download_path + "/release", content.release + "\n")
            #if args.git:
                #print("\nTrying to guess the commit message\n")
                #commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
                #git.commit_to_git(conf, content.name, package.success)
            #else:
                #print("To commit your changes, git add the relevant files and run 'git commit -F commitmsg'")
            link_new_rpms_here()
def __init__(self, port=None):
    self.verbose = console.Option(False)
    self.strict = console.Option(0)
    self.autoTrace = console.Option(False)
    self.stretch = console.Option(100)
    self.timeout = console.Option(3000)
    self.console = console.Command()
    self.console.finishFunction = self.finish
    if port is None:
        self.nextPort = randomPort()
    else:
        self.nextPort = port
    self.host = socket.gethostname()
    self.eventManager = events.EventManager()
    self.fileManager = files.FileManager(printer=self.console)
    self.fileManager.purgeResponseFiles()
    self.requestManager = agents.RequestGenerator(self.eventManager,
                                                  self.fileManager,
                                                  self.console,
                                                  strict=self.strict,
                                                  verbose=self.verbose)
    self.servers = {}
    self.haveProxy = False
    self.proxyProcess = None
    self.activeEvents = {}
    self.console.addOption("strict", self.strict,
                           "Set level of strictness on HTTP message formatting (0-4)")
    self.console.addOption("verbose", self.verbose, "Show details")
    self.console.addOption("stretch", self.stretch,
                           "Multiply all delays by factor = stretch / 100")
    self.console.addOption("timeout", self.timeout,
                           "Set default timeout for wait (in milliseconds)")
    self.console.addOption("autotrace", self.autoTrace,
                           "Trace every request for which check fails")
    self.console.addCommand("serve", self.doServe, "SID+",
                            "Set up servers. (Server with SID starting with '-' is disabled.)")
    self.console.addCommand("request", self.doRequest, "ID FILE SID",
                            "Initiate request named ID for FILE from server SID")
    self.console.addCommand("fetch", self.doFetch, "ID FILE SID",
                            "Fetch FILE from server SID using request named ID")
    self.console.addCommand("respond", self.doRespond, "ID+",
                            "Allow servers to return responses to requests")
    self.console.addCommand("delay", self.doDelay, "MS",
                            "Delay for MS milliseconds")
    self.console.addCommand("check", self.doCheck, "ID [CODE]",
                            "Make sure request ID handled properly and generated expected CODE")
    self.console.addCommand("generate", self.doGenerate, "FILE BYTES",
                            "Generate file (extension '.txt' or '.bin') with specified number of bytes")
    self.console.addCommand("delete", self.doDelete, "FILE+",
                            "Delete specified files")
    self.console.addCommand("proxy", self.doProxy, "[PATH] ARG*",
                            "(Re)start proxy server (pass arguments to proxy)")
    self.console.addCommand("external", self.doExternalProxy, "HOST:PORT",
                            "Use external proxy")
    self.console.addCommand("trace", self.doTrace, "ID+",
                            "Trace histories of requests")
    self.console.addCommand("signal", self.doSignal, "[SIGNO]",
                            "Send signal number SIGNO to process. Default = 13 (SIGPIPE)")
    self.console.addCommand("disrupt", self.doDisrupt, "(request|response) [SID]",
                            "Schedule disruption of request or response by client [or server SID]")
    self.console.addCommand("wait", self.doWait, "* | ID+",
                            "Wait until all or listed pending requests, fetches, and responses have completed")
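# A hypothetical sketch (not part of the original harness) of how a new
# command would plug into this console: addCommand() takes the command name,
# a handler, an argument-signature string (e.g. "MS", "ID+", "[CODE]"), and
# a help string, mirroring the registrations in __init__ above. The "pause"
# command and PauseMixin below are invented for illustration.
import time


class PauseMixin:
    def registerPause(self):
        self.console.addCommand("pause", self.doPause, "MS",
                                "Pause the harness for MS milliseconds")

    def doPause(self, args):
        # Handlers receive the already-split argument list from the console.
        time.sleep(int(args[0]) / 1000.0)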