def test_guess_commit_message(self):
    """
    Test guess_commit_message() with mocked internal functions and both
    commitmessage information and cves available from newsfile.
    """
    conf = config.Config()
    conf.old_version = "0.0.0"
    tcontent = tarball.Content("", "testball", "0.0.1", [], conf)
    conf.content = tcontent
    process_NEWS_backup = commitmessage.process_NEWS

    def mock_process_NEWS(newsfile, old_version, name, version):
        return (['', 'commit', 'message', 'with', 'cves', ''],
                set(['cve1', 'cve2']))

    commitmessage.process_NEWS = mock_process_NEWS
    commitmessage.build.download_path = ""
    open_name = 'util.open_auto'
    with mock.patch(open_name, create=True) as mock_open:
        mock_open.return_value = mock.MagicMock()
        commitmessage.guess_commit_message("", conf, tcontent)

    # reset mocks before asserting so a failure doesn't cascade to
    # other tests
    commitmessage.process_NEWS = process_NEWS_backup

    fh = mock_open.return_value.__enter__.return_value
    fh.write.assert_called_with(
        'testball: Autospec creation for update from version 0.0.0 to '
        'version 0.0.1\n\n\ncommit\nmessage\nwith\ncves\n\n\ncommit\n'
        'message\nwith\ncves\n\nCVEs fixed in this build:\ncve1\ncve2'
        '\n\n')
def test_parse_build_results_pkgconfig(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing qmake package (pkgconfig error)
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    conf.config_opts['32bit'] = True
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)

    open_name = 'build.util.open_auto'
    content = 'line 1\nwhich: no qmake\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup

    self.assertIn('pkgconfig(Qt)', reqs.buildreqs)
    self.assertIn('pkgconfig(32Qt)', reqs.buildreqs)
    self.assertEqual(pkg.must_restart, 1)
def test_parse_build_results_patch(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    backport patch no longer applying
    """
    def mock_util_call(cmd):
        del cmd

    def mock_conf_remove_backport_patch(patch):
        del patch
        return 1

    conf = config.Config('')
    conf.setup_patterns()
    conf.remove_backport_patch = mock_conf_remove_backport_patch
    conf.patches = ['backport-test.patch']
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)

    open_name = 'build.util.open_auto'
    content = 'line 1\nPatch #1 (backport-test.patch):\nSkipping patch.'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup

    self.assertEqual(pkg.must_restart, 1)
def test_parse_build_results_failed_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing package.
    """
    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    open_auto_backup = build.util.open_auto
    build.util.call = MagicMock(return_value=None)
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)

    with open('tests/builderrors', 'r') as f:
        builderrors = f.readlines()

    for error in builderrors:
        if not error.startswith('#'):
            input, output = error.strip('\n').split('|')
            reqs.buildreqs = set()
            build.util.open_auto = mock_open(read_data=input)
            pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)
            self.assertIn(output, reqs.buildreqs)
            self.assertGreater(pkg.must_restart, 0)

    # Restore the original functions
    build.util.call = call_backup
    build.util.open_auto = open_auto_backup
def generator(self):
    """Test template."""
    conf = config.Config()
    conf.parse_config_versions = Mock(return_value={})
    name_arg = ""
    version_arg = ""
    if state == 1 or state == 3:
        name_arg = f"state.{name}"
    if state == 2 or state == 3:
        version_arg = f"state.{version}"
    content = tarball.Content(url, name_arg, version_arg, [], conf)
    content.config = conf
    content.name_and_version(Mock())
    name_cmp = name
    version_cmp = version
    if state == 1 or state == 3:
        name_cmp = name_arg
    if state == 2 or state == 3:
        version_cmp = version_arg
    self.assertEqual(name_cmp, content.name)
    self.assertEqual(version_cmp, content.version)
    # redo without args and verify giturl is set correctly
    content.name = ""
    content.version = ""
    content.name_and_version(Mock())
    if "github.com" in url:
        self.assertRegex(
            content.giturl,
            r"https://github.com/[^/]+/" + content.repo + ".git")
def test_parse_build_results_banned_files(self):
    """
    Test parse_build_results with a test log indicating banned files were
    installed
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config()
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf)
    call_backup = build.util.call
    build.util.call = mock_util_call
    fm = files.FileManager(conf)

    open_name = 'build.util.open_auto'
    content = 'line 1\n' \
              'Installed (but unpackaged) file(s) found:\n' \
              '/opt/file\n' \
              '/usr/etc/file\n' \
              '/usr/local/file\n' \
              '/usr/src/file\n' \
              '/var/file\n' \
              'RPM build errors\n' \
              'errors here\n'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        build.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup

    self.assertEqual(fm.has_banned, True)
    # check no files were added
    self.assertEqual(build.must_restart, 0)
def test_guess_commit_message_imported_key(self):
    """
    Test guess_commit_message() with mocked internal functions and both
    commitmessage information and cves available from newsfile. A cve is
    also available from config, which changes the first line of the commit
    message. Additionally there is imported key info that will be displayed
    at the end of the message.
    """
    conf = config.Config()
    tcontent = tarball.Content("", "testball", "0.0.1", [], conf)
    conf.content = tcontent
    process_NEWS_backup = commitmessage.process_NEWS

    def mock_process_NEWS(newsfile, old_version, name, version):
        return (['', 'commit', 'message', 'with', 'cves', ''],
                set(['cve1', 'cve2']))

    commitmessage.process_NEWS = mock_process_NEWS
    conf.cves = set(['CVE-1234-5678'])
    conf.old_version = None  # Allow cve title to be set
    open_name = 'util.open_auto'
    with mock.patch(open_name, create=True) as mock_open:
        mock_open.return_value = mock.MagicMock()
        commitmessage.guess_commit_message("keyinfo content", conf, tcontent)

    # reset mocks before asserting so a failure doesn't cascade to
    # other tests
    commitmessage.process_NEWS = process_NEWS_backup

    fh = mock_open.return_value.__enter__.return_value
    fh.write.assert_called_with(
        'testball: Fix for CVE-1234-5678\n\n\ncommit\nmessage\nwith\n'
        'cves\n\n\ncommit\nmessage\nwith\ncves\n\nCVEs fixed in this '
        'build:\nCVE-1234-5678\ncve1\ncve2\n\nKey imported:\nkeyinfo '
        'content\n')
def test_parse_build_results_simple_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing httpd-dev package (simple pat error)
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)

    open_name = 'build.util.open_auto'
    content = 'line 1\nchecking for Apache test module support\nexiting'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup

    self.assertIn('httpd-dev', reqs.buildreqs)
    self.assertEqual(pkg.must_restart, 1)
def setUp(self):
    """Set up default values before starting a test."""
    # Set strength to 0 so it can be updated during tests
    tarball.build.base_path = '/tmp'
    tarball.build.download_path = '/download/path/'
    conf = config.Config()
    self.content = tarball.Content('', '', '', [], conf)
    conf.content = self.content
def setUp(self):
    # url, version, name, release
    url = "http://www.testpkg.com/testpkg/pkg-1.0.tar.gz"
    conf = config.Config("")
    content = tarball.Content('', '', '', [], conf, "")
    conf.content = content
    self.specfile = specfiles.Specfile(url, '1.1.1', 'test_pkg', '1', conf,
                                       buildreq.Requirements(url), content)
    self.bb_dict = {"DEPENDS": "ncurses gettext-native",
                    "LICENSE": "new"}
def test_scan_for_tests_perlcheck_PL(self):
    """
    Test scan_for_tests with perlcheck suite
    """
    reqs = buildreq.Requirements("")
    conf = config.Config("")
    tcontent = tarball.Content("", "", "", [], conf, "")
    listdir_backup = os.listdir
    check.os.listdir = mock_generator(['Makefile.PL'])
    conf.default_pattern = "cpan"
    check.scan_for_tests('pkgdir', conf, reqs, tcontent)

    check.os.listdir = listdir_backup

    self.assertEqual(check.tests_config, 'make TEST_VERBOSE=1 test')
def setUp(self):
    conf = config.Config()
    conf.config_opts['dev_requires_extras'] = False
    url = "http://www.testpkg.com/testpkg/pkg-1.0.tar.gz"
    content = tarball.Content(url, 'pkg', '1.0', [], conf)
    conf.content = content
    reqs = buildreq.Requirements(url)
    self.specfile = specfiles.Specfile(url, '1.0', 'pkg', '2', conf, reqs,
                                       content)

    def mock_write(string):
        self.WRITES.append(string)

    self.specfile._write = mock_write
    self.specfile._write_strip = mock_write
    self.WRITES = []
def test_scan_for_tests_tox_requires(self):
    """
    Test scan_for_tests with tox.ini in the files list, should add several
    build requirements
    """
    reqs = buildreq.Requirements("")
    conf = config.Config("")
    tcontent = tarball.Content("", "", "", [], conf, "")
    listdir_backup = os.listdir
    check.os.listdir = mock_generator(['tox.ini'])
    check.scan_for_tests('pkgdir', conf, reqs, tcontent)

    check.os.listdir = listdir_backup

    self.assertEqual(
        reqs.buildreqs,
        set(['tox', 'pytest', 'virtualenv', 'pluggy', 'py-python']))
def test_scan_for_tests_cmake(self):
    """
    Test scan_for_tests with cmake suite
    """
    reqs = buildreq.Requirements("")
    conf = config.Config("")
    tcontent = tarball.Content("", "", "", [], conf, "")
    listdir_backup = os.listdir
    check.os.listdir = mock_generator(['CMakeLists.txt'])
    content = 'enable_testing'
    m_open = mock_open(read_data=content)
    with patch(self.open_name, m_open, create=True):
        conf.default_pattern = "cmake"
        check.scan_for_tests('pkgdir', conf, reqs, tcontent)

    check.os.listdir = listdir_backup

    self.assertEqual(check.tests_config, 'cd clr-build; make test')
def test_scan_for_tests_perlcheck_in(self):
    """
    Test scan_for_tests with perlcheck suite via Makefile.in
    """
    reqs = buildreq.Requirements("")
    conf = config.Config("")
    tcontent = tarball.Content("", "", "", [], conf, "")
    listdir_backup = os.listdir
    check.os.listdir = mock_generator(['Makefile.in'])
    content = 'test:'
    m_open = mock_open(read_data=content)
    with patch(self.open_name, m_open, create=True):
        conf.default_pattern = "cpan"
        check.scan_for_tests('pkgdir', conf, reqs, tcontent)

    check.os.listdir = listdir_backup

    self.assertEqual(check.tests_config, 'make TEST_VERBOSE=1 test')
def test_scan_for_tests_makecheck_am(self):
    """
    Test scan_for_tests with makecheck suite via Makefile.am
    """
    reqs = buildreq.Requirements("")
    conf = config.Config("")
    tcontent = tarball.Content("", "", "", [], conf, "")
    listdir_backup = os.listdir
    check.os.listdir = mock_generator(['Makefile.am'])
    m_open = mock_open()
    with patch(self.open_name, m_open, create=True):
        conf.default_pattern = "configure_ac"
        check.scan_for_tests('pkgdir', conf, reqs, tcontent)

    check.os.listdir = listdir_backup

    self.assertEqual(check.tests_config, 'make %{?_smp_mflags} check')
def test_scan_for_tests_setup(self):
    """
    Test scan_for_tests with setup.py suite
    """
    reqs = buildreq.Requirements("")
    conf = config.Config("")
    tcontent = tarball.Content("", "", "", [], conf, "")
    listdir_backup = os.listdir
    check.os.listdir = mock_generator(['setup.py'])
    content = 'test_suite'
    m_open = mock_open(read_data=content)
    with patch(self.open_name, m_open, create=True):
        conf.default_pattern = "distutils3"
        check.scan_for_tests('pkgdir', conf, reqs, tcontent)

    check.os.listdir = listdir_backup

    self.assertEqual(
        check.tests_config,
        'PYTHONPATH=%{buildroot}$(python -c "import sys; print(sys.path[-1])") '
        'python setup.py test')
def test_parse_build_results_files(self):
    """
    Test parse_build_results with a test log indicating files are missing
    """
    def mock_util_call(cmd):
        del cmd

    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)

    open_name = 'build.util.open_auto'
    content = 'line 1\n' \
              'Installed (but unpackaged) file(s) found:\n' \
              '/usr/testdir/file\n' \
              '/usr/testdir/file1\n' \
              '/usr/testdir/file2\n' \
              'RPM build errors\n' \
              'errors here\n'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)

    build.util.call = call_backup

    self.assertEqual(
        fm.files,
        set([
            '/usr/testdir/file',
            '/usr/testdir/file1',
            '/usr/testdir/file2'
        ]))
    # one for each file added
    self.assertEqual(pkg.must_restart, 0)
    self.assertEqual(pkg.file_restart, 3)
def generator(self):
    """Test template."""
    conf = config.Config('/download/path')
    conf.parse_config_versions = Mock(return_value={})
    # Test four different name/version states for tarball.Content, each in
    # a subtest. Test failures will print these state numbers for easy
    # identification:
    # 0 - no state
    # 1 - name only
    # 2 - version only
    # 3 - name and version
    for state in range(4):
        with self.subTest(state=state):
            name_arg = ""
            version_arg = ""
            if state == 1 or state == 3:
                name_arg = f"state.{name}"
            if state == 2 or state == 3:
                version_arg = f"state.{version}"
            content = tarball.Content(url, name_arg, version_arg, [], conf,
                                      '/tmp')
            content.config = conf
            pkg = build.Build()
            mgr = files.FileManager(conf, pkg)
            content.name_and_version(mgr)
            name_cmp = name
            version_cmp = version
            if state == 1 or state == 3:
                name_cmp = name_arg
            if state == 2 or state == 3:
                version_cmp = version_arg
            self.assertEqual(name_cmp, content.name)
            self.assertEqual(version_cmp, content.version)
            # redo without args and verify giturl is set correctly
            content.name = ""
            content.version = ""
            content.name_and_version(Mock())
            if "github.com" in url:
                self.assertRegex(
                    content.giturl,
                    r"https://github.com/[^/]+/" + content.repo + ".git")
def setUp(self):
    """Set up default values before starting a test."""
    # Set strength to 0 so it can be updated during tests
    conf = config.Config()
    self.content = tarball.Content('', '', '', [], conf)
    conf.content = self.content
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    conf = config.Config()
    check_requirements(args.git)
    package = build.Build(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(package.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        try:
            with open(os.path.join(package.download_path,
                                   content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(package.download_path, args.bump, filemanager,
                            content, requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(package.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name,
                                    package.download_path, conf)
    specdescription.scan_for_description(content.name, _dir,
                                         conf.license_translations,
                                         conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(package.download_path, _dir,
                                   conf.transforms)
    add_sources(package.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements, content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)
    print("\n")

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, package.download_path, conf,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(package.download_path)
    while 1:
        package.package(filemanager, args.mock_config, args.mock_opts, conf,
                        requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(package.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(package.uniqueext,
                                               content.name,
                                               content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
        if package.round > 20 or package.must_restart == 0:
            break

        save_mock_logs(package.download_path, package.round)

    check.check_regression(package.download_path,
                           conf.config_opts['skip_tests'])

    if package.success == 0:
        conf.create_buildreq_cache(package.download_path, content.version,
                                   requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(package.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(package.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(package.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content,
                                       package)
    conf.create_buildreq_cache(package.download_path, content.version,
                               requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(package.download_path, conf, content.name,
                          package.success)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
def package(
    args,
    url,
    name,
    archives,
    archives_from_git,
    workingdir,
    download_from_git,
    branch,
    redownload_from_git,
    redownload_archive,
    force_module,
    force_fullclone,
    mock_dir,
    short_circuit,
    do_file_restart,
):
    """Entry point for building a package with autospec."""
    conf = config.Config(args.target)
    conf.parse_config_files_early()

    if util.debugging:
        print_debug(f"url 1: {url}")
    new_archives_from_git = []
    name_re_escaped = re.escape(name)

    # Download the source from git if necessary
    if download_from_git:
        giturl = url
        found_file = False
        fileslist = None
        download_file_full_path = ""
        if util.debugging:
            print_debug(f"url 2: {url}")
            print_debug(f"BRANCH 2: {branch}")
        # filename_re = re.compile(r"^{}{}".format(name, r"(-|-.)(\d+)(\.\d+)+\.tar\.gz"))
        filename_re = re.compile(r"^{}{}".format(name_re_escaped, r"-.*\.tar\.gz"))
        if os.path.basename(os.getcwd()) == name:
            package_path = "./"
            if util.debugging:
                print_debug(f"package_path 11: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=os.path.getmtime)
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(
                        os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(f"found old package_path 21: {download_file_full_path}")
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(f"download_file_full_path 11: {download_file_full_path}")
                print_debug(f"giturl 11: {giturl}")
        else:
            package_path = f"packages/{name}"
            if util.debugging:
                print_debug(f"package_path 12: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=os.path.getmtime)
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(
                        os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(f"found old package_path 22: {download_file_full_path}")
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(f"download_file_full_path 12: {download_file_full_path}")
                print_debug(f"giturl 12: {giturl}")
    else:
        giturl = ""
        url = download.do_curl_get_effective_url(url)

    if archives_from_git:
        arch_url = []
        arch_destination = []
        arch_branch = []
        arch_submodule = []
        arch_forcefullclone = []
        if util.debugging:
            print_debug(f"ARCHIVES_GIT 2: {archives_from_git}")
            print_debug(f"archives in options.conf: {archives}\n\n")
        archives_re = re.compile(r"^file:\/\/")
        index_f = []

        for index, url_entry in enumerate(archives):
            if archives_re.search(url_entry):
                index_f.append(index)
        if util.debugging:
            for x in range(len(index_f) - 1, -1, -1):
                print_debug(
                    f"rm {index_f[x]}:{archives[index_f[x]]} {index_f[x] + 1}:{archives[index_f[x] + 1]}")
        for x in sorted(range(len(index_f) - 1, -1, -1), reverse=True):
            del archives[index_f[x]:index_f[x] + 2]
        if util.debugging:
            print_debug(f"archives in options.conf: {archives}")

        for aurl, dest, br, sm, ffc in zip(archives_from_git[::5],
                                           archives_from_git[1::5],
                                           archives_from_git[2::5],
                                           archives_from_git[3::5],
                                           archives_from_git[4::5]):
            arch_url.append(aurl)
            arch_destination.append(dest)
            arch_branch.append(br)
            arch_submodule.append(sm)
            arch_forcefullclone.append(ffc)
            if util.debugging:
                print_debug(
                    f"FOR ZIP {arch_url[-1]} - {arch_destination[-1]} - {arch_branch[-1]} - {arch_submodule[-1]} - {arch_forcefullclone[-1]}")

        for index, new_arch_url in enumerate(arch_url, start=0):
            found_file = False
            fileslist = []
            download_file_full_path = ""
            arch_name = os.path.splitext(os.path.basename(new_arch_url))[0]
            arch_name_re_escaped = re.escape(arch_name)
            filename_re = re.compile(r"^{}{}".format(arch_name_re_escaped, r"-.*\.tar\.gz"))
            if util.debugging:
                print_debug(f"arch_name: {arch_name}")
            if os.path.basename(os.getcwd()) == name:
                package_path = "./"
                if util.debugging:
                    print_debug(f"archive package_path 1: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file():
                        if filename_re.search(filename.name):
                            found_file = True
                            download_file_full_path = "file://{}".format(
                                os.path.abspath(f"{package_path}{filename.name}"))
                            if util.debugging:
                                print_debug(f"filename: {filename.name}")
                                print_debug(f"Index: {index}")
                                print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                                print_debug(f"archive found 1: {arch_name} - {download_file_full_path}")
                            break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                        print_debug(f"Fazer download archive 1: {arch_name} - {new_arch_url}")
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(arch_forcefullclone[index]),
                        conf=conf)
                if util.debugging:
                    print_debug(f"archive download_file_full_path 1: {download_file_full_path}")
                if download_file_full_path in archives or arch_destination[index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
            else:
                package_path = f"packages/{name}"
                if util.debugging:
                    print_debug(f"archive package_path 2: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file():
                        if filename_re.search(filename.name):
                            found_file = True
                            download_file_full_path = "file://{}".format(
                                os.path.abspath(f"{package_path}{filename.name}"))
                            if util.debugging:
                                print_debug(f"Index: {index}")
                                print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                                print_debug(f"archive found 2: {arch_name} - {download_file_full_path}")
                            break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                        print_debug(f"Fazer download archive 2: {arch_name} - {new_arch_url}")
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(arch_forcefullclone[index]),
                        conf=conf)
                if util.debugging:
                    print_debug(f"archive download_file_full_path 2: {download_file_full_path}")
                if download_file_full_path in archives or arch_destination[index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
        if util.debugging:
            print_debug(f"new_archives_from_git: {new_archives_from_git}\n")

    #check_requirements(args.git)
    conf.detect_build_from_url(url)
    package = build.Build()

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package, mock_dir, short_circuit)
    if util.debugging:
        print_debug(f"url 4: {url}")
        print_debug(f"archives 4: {archives}")
        print_debug(f"new_archives_from_git 4: {new_archives_from_git}")

    content = tarball.Content(url, name, args.version, archives, conf,
                              workingdir, giturl, download_from_git, branch,
                              new_archives_from_git, force_module,
                              force_fullclone)
    content.process(filemanager)
    conf.create_versions(content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req(conf)
    conf.parse_config_files(args.bump, filemanager, content.version,
                            requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    if args.license_only:
        try:
            with open(
                    os.path.join(conf.download_path,
                                 content.name + ".license"),
                    "r",
            ) as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, name)
        exit(0)

    if short_circuit == "prep" or short_circuit is None:
        requirements.scan_for_configure(_dir, content.name, conf)
        specdescription.scan_for_description(content.name, _dir,
                                             conf.license_translations,
                                             conf.license_blacklist)
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
        commitmessage.scan_for_changes(conf.download_path, _dir,
                                       conf.transforms)
        conf.add_sources(archives, content)
        check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content, mock_dir, short_circuit)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, conf, interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    if short_circuit == "prep" or short_circuit is None:
        conf.create_buildreq_cache(content.version,
                                   requirements.buildreqs_cache)
        #conf.create_reqs_cache(content.version, requirements.reqs_cache)

    specfile.write_spec()
    filemanager.load_specfile_information(specfile, content)
    if short_circuit == "prep":
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/SRPMS/")
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/BUILD/")
    if short_circuit == "install":
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/RPMS/")

    while 1:
        package.package(
            filemanager,
            args.mock_config,
            args.mock_opts,
            conf,
            requirements,
            content,
            mock_dir,
            short_circuit,
            do_file_restart,
            args.cleanup,
        )
        if (short_circuit != package.short_circuit):
            print_info(f"short_circuit: {short_circuit}")
            print_info(f"package.short_circuit: {package.short_circuit}")
            short_circuit = package.short_circuit
            print_info(f"new short_circuit: {short_circuit}")

        filemanager.load_specfile_information(specfile, content)
        filemanager.load_specfile(specfile)
        specfile.write_spec()
        filemanager.newfiles_printed = 0
        mock_chroot = f"{mock_dir}/clear-{package.uniqueext}/root/builddir/build/BUILDROOT/{content.name}-{content.version}-{content.release}.x86_64"
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
            print_info(f"filemanager.clean_directories({mock_chroot})")

        if do_file_restart:
            if package.round > 20 or (package.must_restart == 0
                                      and package.file_restart == 0):
                if (short_circuit == "install"):
                    print_info(f"short_circuit: {short_circuit}")
                    print_info(f"package.short_circuit: {package.short_circuit}")
                    short_circuit = "binary"
                    print_info(f"new short_circuit: {short_circuit}")
                    continue
                else:
                    break
        else:
            if (package.round > 20 or package.must_restart == 0):
                break

        save_mock_logs(conf.download_path, package.round)

    #if short_circuit is None or short_circuit == "install":
    #check.check_regression(conf.download_path, conf.config_opts["skip_tests"])
    #conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
    #conf.create_reqs_cache(content.version, requirements.reqs_cache)

    if package.success == 0:
        #conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif (package.success == 1):
        if os.path.isfile("README.clear"):
            try:
                print("\nREADME.clear CONTENTS")
                print("*********************")
                with open("README.clear", "r") as readme_f:
                    print(readme_f.read())
                print("*********************\n")
            except Exception:
                pass

        if (short_circuit is None):
            examine_abi(conf.download_path, content.name)
            #if os.path.exists("/var/lib/rpm"):
            #print("\nGenerating whatrequires\n")
            #pkg_scan.get_whatrequires(content.name, conf.yum_conf)

            write_out(conf.download_path + "/release", content.release + "\n")

            # record logcheck output
            #logcheck(conf.download_path)

            #if args.git:
            #print("\nTrying to guess the commit message\n")
            #commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
            #git.commit_to_git(conf, content.name, package.success)
        elif (short_circuit == "prep"):
            write_out(conf.download_path + "/release", content.release + "\n")
        #elif (short_circuit == "build"):
        # record logcheck output
        #logcheck(conf.download_path)
        #elif (short_circuit == "install"):
        ## record logcheck output
        #logcheck(conf.download_path)
        elif (short_circuit == "binary"):
            examine_abi(conf.download_path, content.name)
            #if os.path.exists("/var/lib/rpm"):
            #print("\nGenerating whatrequires\n")
            #pkg_scan.get_whatrequires(content.name, conf.yum_conf)

            #write_out(conf.download_path + "/release", content.release + "\n")

            #if args.git:
            #print("\nTrying to guess the commit message\n")
            #commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
            #git.commit_to_git(conf, content.name, package.success)
            #else:
            #print("To commit your changes, git add the relevant files and run 'git commit -F commitmsg'")

            link_new_rpms_here()