def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    check_requirements(args.git)
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(url, name, args.version, args.target, archives,
                    filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    if args.prep_only:
        write_prep(workingdir)
        exit(0)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while True:
        build.package(filemanager, args.mock_config, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(build.uniqueext,
                                               tarball.name,
                                               tarball.version,
                                               tarball.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            build.must_restart += 1
        if build.round > 20 or build.must_restart == 0:
            break
        save_build_log(build.download_path, build.round)

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED)

    if args.git:
        git.commit_to_git(build.download_path)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
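# write_out() used above is one of autospec's util helpers and is not defined
# in this file. A minimal sketch of the assumed behavior, for reference only
# (the name _write_out_sketch is hypothetical; the real helper may differ):
def _write_out_sketch(filename, content, mode="w"):
    """Write content to filename in a single call (assumed util.write_out semantics)."""
    with open(filename, mode) as f:
        f.write(content)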
def package(args, url, name, archives, workingdir, infile_dict):
    """Entry point for building a package with autospec."""
    conf = config.Config()
    check_requirements(args.git)
    package = build.Build(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package)
    content = tarball.Content(url, name, args.version, archives, conf)
    content.process(args.target, filemanager)
    conf.create_versions(package.download_path, content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    if args.license_only:
        try:
            with open(os.path.join(package.download_path,
                                   content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf)
        exit(0)

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req()
    conf.parse_config_files(package.download_path, args.bump, filemanager,
                            content, requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(package.download_path, content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    requirements.scan_for_configure(_dir, content.name, package.download_path,
                                    conf)
    specdescription.scan_for_description(content.name, _dir,
                                         conf.license_translations,
                                         conf.license_blacklist)
    # Start one directory higher so we scan *all* versions for licenses
    license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
    commitmessage.scan_for_changes(package.download_path, _dir,
                                   conf.transforms)
    add_sources(package.download_path, archives, content)
    check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    #
    # If infile is passed, parse it and overwrite the specfile configurations
    # with the newly found values.
    #
    if args.infile:
        specfile = infile_update_spec.update_specfile(specfile, infile_dict,
                                                      args.target)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, package.download_path, conf,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(package.download_path)
    while True:
        package.package(filemanager, args.mock_config, args.mock_opts, conf,
                        requirements, content, args.cleanup)
        filemanager.load_specfile(specfile)
        specfile.write_spec(package.download_path)
        filemanager.newfiles_printed = 0
        mock_chroot = "/var/lib/mock/clear-{}/root/builddir/build/BUILDROOT/" \
                      "{}-{}-{}.x86_64".format(package.uniqueext,
                                               content.name,
                                               content.version,
                                               content.release)
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
        if package.round > 20 or package.must_restart == 0:
            break
        save_mock_logs(package.download_path, package.round)

    check.check_regression(package.download_path,
                           conf.config_opts['skip_tests'])

    if package.success == 0:
        conf.create_buildreq_cache(package.download_path, content.version,
                                   requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(package.download_path, content.name)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(content.name, conf.yum_conf)

    write_out(package.download_path + "/release", content.release + "\n")

    # record logcheck output
    logcheck(package.download_path)

    commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content,
                                       package)
    conf.create_buildreq_cache(package.download_path, content.version,
                               requirements.buildreqs_cache)

    if args.git:
        git.commit_to_git(package.download_path, conf, content.name,
                          package.success)
    else:
        print("To commit your changes, git add the relevant files and "
              "run 'git commit -F commitmsg'")
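# For concreteness: with hypothetical values uniqueext="1a2b", name="foo",
# version="1.0" and release="5", the mock_chroot template used in the build
# loop above expands to
#   /var/lib/mock/clear-1a2b/root/builddir/build/BUILDROOT/foo-1.0-5.x86_64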
def package(
    args,
    url,
    name,
    archives,
    archives_from_git,
    workingdir,
    download_from_git,
    branch,
    redownload_from_git,
    redownload_archive,
    force_module,
    force_fullclone,
    mock_dir,
    short_circuit,
    do_file_restart,
):
    """Entry point for building a package with autospec."""
    conf = config.Config(args.target)
    conf.parse_config_files_early()

    if util.debugging:
        print_debug(f"url 1: {url}")
    new_archives_from_git = []
    name_re_escaped = re.escape(name)

    # Download the source from git if necessary
    if download_from_git:
        giturl = url
        found_file = False
        fileslist = None
        download_file_full_path = ""
        if util.debugging:
            print_debug(f"url 2: {url}")
            print_debug(f"BRANCH 2: {branch}")
        # filename_re = re.compile(r"^{}{}".format(name, r"(-|-.)(\d+)(\.\d+)+\.tar\.gz"))
        filename_re = re.compile(r"^{}{}".format(name_re_escaped, r"-.*\.tar\.gz"))
        if os.path.basename(os.getcwd()) == name:
            package_path = "./"
            if util.debugging:
                print_debug(f"package_path 11: {package_path}")
            fileslist = os.listdir(package_path)
            # Sort by modification time, resolving names relative to
            # package_path (the original sorted on bare filenames, which only
            # works when package_path is the current directory).
            fileslist.sort(key=lambda f: os.path.getmtime(os.path.join(package_path, f)))
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(f"found old package_path 21: {download_file_full_path}")
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(f"download_file_full_path 11: {download_file_full_path}")
                print_debug(f"giturl 11: {giturl}")
        else:
            package_path = f"packages/{name}"
            if util.debugging:
                print_debug(f"package_path 12: {package_path}")
            fileslist = os.listdir(package_path)
            fileslist.sort(key=lambda f: os.path.getmtime(os.path.join(package_path, f)))
            for filename in fileslist:
                if re.search(filename_re, filename):
                    found_file = True
                    download_file_full_path = "file://{}".format(os.path.abspath(f"{package_path}{filename}"))
                    if util.debugging:
                        print_debug(f"found old package_path 22: {download_file_full_path}")
                    break
            if not found_file or redownload_from_git is True:
                download_file_full_path = git.git_archive_all(
                    path=package_path,
                    name=name,
                    url=url,
                    branch=branch,
                    force_module=force_module,
                    force_fullclone=force_fullclone,
                    conf=conf)
            url = download_file_full_path
            if util.debugging:
                print_debug(f"download_file_full_path 12: {download_file_full_path}")
                print_debug(f"giturl 12: {giturl}")
    else:
        giturl = ""
        url = download.do_curl_get_effective_url(url)

    if archives_from_git:
        arch_url = []
        arch_destination = []
        arch_branch = []
        arch_submodule = []
        arch_forcefullclone = []
        if util.debugging:
            print_debug(f"ARCHIVES_GIT 2: {archives_from_git}")
            print_debug(f"archives in options.conf: {archives}\n\n")
        # Drop any file:// entries (and their paired destinations) that a
        # previous run appended to archives.
        archives_re = re.compile(r"^file:\/\/")
        index_f = []
        for index, url_entry in enumerate(archives):
            if archives_re.search(url_entry):
                index_f.append(index)
        if util.debugging:
            for x in range(len(index_f) - 1, -1, -1):
                print_debug(f"rm {index_f[x]}:{archives[index_f[x]]} {index_f[x] + 1}:{archives[index_f[x] + 1]}")
        # Delete in descending index order so earlier deletions do not shift
        # the remaining offsets.
        for x in range(len(index_f) - 1, -1, -1):
            del archives[index_f[x]:index_f[x] + 2]
        if util.debugging:
            print_debug(f"archives in options.conf: {archives}")

        # archives_from_git is a flat list of 5-field records:
        # url, destination, branch, submodule flag, force-fullclone flag.
        for aurl, dest, br, sm, ffc in zip(archives_from_git[::5],
                                           archives_from_git[1::5],
                                           archives_from_git[2::5],
                                           archives_from_git[3::5],
                                           archives_from_git[4::5]):
            arch_url.append(aurl)
            arch_destination.append(dest)
            arch_branch.append(br)
            arch_submodule.append(sm)
            arch_forcefullclone.append(ffc)
            if util.debugging:
                print_debug(f"FOR ZIP {arch_url[-1]} - {arch_destination[-1]} - {arch_branch[-1]} - {arch_submodule[-1]} - {arch_forcefullclone[-1]}")

        for index, new_arch_url in enumerate(arch_url, start=0):
            found_file = False
            fileslist = []
            download_file_full_path = ""
            arch_name = os.path.splitext(os.path.basename(new_arch_url))[0]
            arch_name_re_escaped = re.escape(arch_name)
            filename_re = re.compile(r"^{}{}".format(arch_name_re_escaped, r"-.*\.tar\.gz"))
            if util.debugging:
                print_debug(f"arch_name: {arch_name}")
            if os.path.basename(os.getcwd()) == name:
                package_path = "./"
                if util.debugging:
                    print_debug(f"archive package_path 1: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file() and filename_re.search(filename.name):
                        found_file = True
                        download_file_full_path = "file://{}".format(os.path.abspath(f"{package_path}{filename.name}"))
                        if util.debugging:
                            print_debug(f"filename: {filename.name}")
                            print_debug(f"Index: {index}")
                            print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                            print_debug(f"archive found 1: {arch_name} - {download_file_full_path}")
                        break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                        print_debug(f"Downloading archive 1: {arch_name} - {new_arch_url}")
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(arch_forcefullclone[index]),
                        conf=conf)
                    if util.debugging:
                        print_debug(f"archive download_file_full_path 1: {download_file_full_path}")
                if download_file_full_path in archives or arch_destination[index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
            else:
                package_path = f"packages/{name}"
                if util.debugging:
                    print_debug(f"archive package_path 2: {package_path}")
                for filename in os.scandir(package_path):
                    if filename.is_file() and filename_re.search(filename.name):
                        found_file = True
                        download_file_full_path = "file://{}".format(os.path.abspath(f"{package_path}{filename.name}"))
                        if util.debugging:
                            print_debug(f"Index: {index}")
                            print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                            print_debug(f"archive found 2: {arch_name} - {download_file_full_path}")
                        break
                if not found_file or redownload_archive is True:
                    if util.debugging:
                        print_debug(f"Index: {index}")
                        print_debug(f"Destination: {arch_destination[index]} - Branch: {arch_branch[index]}")
                        print_debug(f"Downloading archive 2: {arch_name} - {new_arch_url}")
                    download_file_full_path = git.git_archive_all(
                        path=package_path,
                        name=arch_name,
                        url=new_arch_url,
                        branch=arch_branch[index],
                        force_module=str_to_bool(arch_submodule[index]),
                        force_fullclone=str_to_bool(arch_forcefullclone[index]),
                        conf=conf)
                    if util.debugging:
                        print_debug(f"archive download_file_full_path 2: {download_file_full_path}")
                if download_file_full_path in archives or arch_destination[index] in archives:
                    print_info(f"\nAlready in archives: {archives}")
                else:
                    archives.append(download_file_full_path)
                    archives.append(arch_destination[index])
                    print_info(f"\nAdding to archives: {archives}")
                new_archives_from_git.append(arch_url[index])
                new_archives_from_git.append(arch_destination[index])
                new_archives_from_git.append(arch_branch[index])
                new_archives_from_git.append(arch_submodule[index])
                new_archives_from_git.append(arch_forcefullclone[index])
        if util.debugging:
            print_debug(f"new_archives_from_git: {new_archives_from_git}\n")

    # check_requirements(args.git)
    conf.detect_build_from_url(url)
    package = build.Build()

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager(conf, package, mock_dir, short_circuit)

    if util.debugging:
        print_debug(f"url 4: {url}")
        print_debug(f"archives 4: {archives}")
        print_debug(f"new_archives_from_git 4: {new_archives_from_git}")

    content = tarball.Content(url, name, args.version, archives, conf,
                              workingdir, giturl, download_from_git, branch,
                              new_archives_from_git, force_module,
                              force_fullclone)
    content.process(filemanager)
    conf.create_versions(content.multi_version)
    conf.content = content  # hack to avoid recursive dependency on init
    # Search up one level from here to capture multiple versions
    _dir = content.path

    conf.setup_patterns()
    conf.config_file = args.config
    requirements = buildreq.Requirements(content.url)
    requirements.set_build_req(conf)
    conf.parse_config_files(args.bump, filemanager, content.version,
                            requirements)
    conf.setup_patterns(conf.failed_pattern_dir)
    conf.parse_existing_spec(content.name)

    if args.prep_only:
        write_prep(conf, workingdir, content)
        exit(0)

    if args.license_only:
        try:
            with open(os.path.join(conf.download_path,
                                   content.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, name)
        exit(0)

    if short_circuit == "prep" or short_circuit is None:
        requirements.scan_for_configure(_dir, content.name, conf)
        specdescription.scan_for_description(content.name, _dir,
                                             conf.license_translations,
                                             conf.license_blacklist)
        # Start one directory higher so we scan *all* versions for licenses
        license.scan_for_licenses(os.path.dirname(_dir), conf, content.name)
        commitmessage.scan_for_changes(conf.download_path, _dir,
                                       conf.transforms)
        conf.add_sources(archives, content)
        check.scan_for_tests(_dir, conf, requirements, content)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(content.url, content.version, content.name,
                                  content.release, conf, requirements,
                                  content, mock_dir, short_circuit)
    filemanager.load_specfile(specfile)
    load_specfile(conf, specfile)

    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(url, conf, interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    if short_circuit == "prep" or short_circuit is None:
        conf.create_buildreq_cache(content.version,
                                   requirements.buildreqs_cache)
        # conf.create_reqs_cache(content.version, requirements.reqs_cache)

    specfile.write_spec()
    filemanager.load_specfile_information(specfile, content)
    if short_circuit == "prep":
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/SRPMS/")
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/BUILD/")
    if short_circuit == "install":
        util.call(f"sudo rm -rf {mock_dir}/clear-{content.name}/root/builddir/build/RPMS/")

    while True:
        package.package(
            filemanager,
            args.mock_config,
            args.mock_opts,
            conf,
            requirements,
            content,
            mock_dir,
            short_circuit,
            do_file_restart,
            args.cleanup,
        )
        if short_circuit != package.short_circuit:
            print_info(f"short_circuit: {short_circuit}")
            print_info(f"package.short_circuit: {package.short_circuit}")
            short_circuit = package.short_circuit
            print_info(f"new short_circuit: {short_circuit}")
        filemanager.load_specfile_information(specfile, content)
        filemanager.load_specfile(specfile)
        specfile.write_spec()
        filemanager.newfiles_printed = 0
        mock_chroot = f"{mock_dir}/clear-{package.uniqueext}/root/builddir/build/BUILDROOT/{content.name}-{content.version}-{content.release}.x86_64"
        if filemanager.clean_directories(mock_chroot):
            # directories added to the blacklist, need to re-run
            package.must_restart += 1
            print_info(f"filemanager.clean_directories({mock_chroot})")
        if do_file_restart:
            if package.round > 20 or (package.must_restart == 0 and package.file_restart == 0):
                if short_circuit == "install":
                    print_info(f"short_circuit: {short_circuit}")
                    print_info(f"package.short_circuit: {package.short_circuit}")
                    short_circuit = "binary"
                    print_info(f"new short_circuit: {short_circuit}")
                    continue
                else:
                    break
        else:
            if package.round > 20 or package.must_restart == 0:
                break
        save_mock_logs(conf.download_path, package.round)

    # if short_circuit is None or short_circuit == "install":
    #     check.check_regression(conf.download_path, conf.config_opts["skip_tests"])
    #     conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
    #     conf.create_reqs_cache(content.version, requirements.reqs_cache)

    if package.success == 0:
        # conf.create_buildreq_cache(content.version, requirements.buildreqs_cache)
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif package.success == 1:
        if os.path.isfile("README.clear"):
            try:
                print("\nREADME.clear CONTENTS")
                print("*********************")
                with open("README.clear", "r") as readme_f:
                    print(readme_f.read())
                print("*********************\n")
            except Exception:
                pass

        if short_circuit is None:
            examine_abi(conf.download_path, content.name)
            # if os.path.exists("/var/lib/rpm"):
            #     print("\nGenerating whatrequires\n")
            #     pkg_scan.get_whatrequires(content.name, conf.yum_conf)
            write_out(conf.download_path + "/release", content.release + "\n")
            # record logcheck output
            # logcheck(conf.download_path)
            # if args.git:
            #     print("\nTrying to guess the commit message\n")
            #     commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
            #     git.commit_to_git(conf, content.name, package.success)
        elif short_circuit == "prep":
            write_out(conf.download_path + "/release", content.release + "\n")
        # elif short_circuit == "build":
        #     # record logcheck output
        #     logcheck(conf.download_path)
        # elif short_circuit == "install":
        #     # record logcheck output
        #     logcheck(conf.download_path)
        elif short_circuit == "binary":
            examine_abi(conf.download_path, content.name)
            # if os.path.exists("/var/lib/rpm"):
            #     print("\nGenerating whatrequires\n")
            #     pkg_scan.get_whatrequires(content.name, conf.yum_conf)
            # write_out(conf.download_path + "/release", content.release + "\n")
            # if args.git:
            #     print("\nTrying to guess the commit message\n")
            #     commitmessage.guess_commit_message(pkg_integrity.IMPORTED, conf, content)
            #     git.commit_to_git(conf, content.name, package.success)
            # else:
            #     print("To commit your changes, git add the relevant files and run 'git commit -F commitmsg'")
            link_new_rpms_here()
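# str_to_bool() used in the archive handling above comes from autospec's util
# module and is not shown here. A minimal sketch of the assumed semantics
# (_str_to_bool_sketch is a hypothetical stand-in): the flat archives_from_git
# list stores its submodule and force-fullclone flags as strings, so they must
# be mapped back to booleans.
def _str_to_bool_sketch(value):
    """Return True for common truthy strings (assumed behavior)."""
    return value.strip().lower() in ("true", "1", "yes", "y")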
def main():
    """Main function for autospec."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    parser.add_argument("-n", "--name", action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("-v", "--version", action="store", dest="version",
                        default="", help="Override the package version")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                             " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Search for package signature from source URL and "
                             "attempt to verify package")
    parser.add_argument("--non_interactive", action="store_true",
                        default=False,
                        help="Disable interactive mode for package verification")
    args = parser.parse_args()
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)
    # NOTE: the original snippet referenced an undefined `workingdir` here; a
    # temporary scratch directory is assumed (requires `import tempfile`).
    workingdir = tempfile.mkdtemp(prefix="autospec-")
    build.setup_workingdir(workingdir)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    filemanager = files.FileManager()
    tarball.process(args.url, args.name, args.version, args.target,
                    args.archives, filemanager)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(_dir)
        exit(0)

    config.setup_patterns()
    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump, filemanager)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(_dir)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(_dir)
    commitmessage.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    filemanager.load_specfile(specfile)
    load_specfile(specfile)

    print("\n")
    if args.integrity:
        interactive_mode = not args.non_interactive
        pkg_integrity.check(args.url, build.download_path,
                            interactive=interactive_mode)
        pkg_integrity.load_specfile(specfile)

    specfile.write_spec(build.download_path)
    while True:
        build.package(filemanager)
        filemanager.load_specfile(specfile)
        specfile.write_spec(build.download_path)
        filemanager.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print_fatal("Build failed, aborting")
        sys.exit(1)
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    if os.path.exists("/var/lib/rpm"):
        pkg_scan.get_whatrequires(tarball.name)

    write_out(build.download_path + "/release", tarball.release + "\n")

    # record logcheck output
    logcheck(build.download_path)

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-g", "--skip-git", action="store_false", dest="git",
                        default=True, help="Don't commit result to git")
    parser.add_argument("-n", "--name", nargs=1, action="store", dest="name",
                        default="", help="Override the package name")
    parser.add_argument("url",
                        help="tarball URL (e.g."
                             " http://example.com/downloads/mytar.tar.gz)")
    parser.add_argument('-a', "--archives", action="store", dest="archives",
                        default=[], nargs='*',
                        help="tarball URLs for additional source archives and"
                        " a location for the sources to be extracted to (e.g."
                        " http://example.com/downloads/dependency.tar.gz"
                        " /directory/relative/to/extract/root )")
    parser.add_argument("-l", "--license-only", action="store_true",
                        dest="license_only", default=False,
                        help="Only scan for license files")
    parser.add_argument("-b", "--skip-bump", dest="bump",
                        action="store_false", default=True,
                        help="Don't bump release number")
    parser.add_argument("-c", "--config", dest="config", action="store",
                        default="/usr/share/defaults/autospec/autospec.conf",
                        help="Set configuration file to use")
    parser.add_argument("-t", "--target", dest="target", action="store",
                        default=None,
                        help="Target location to create or reuse")
    parser.add_argument("-i", "--integrity", action="store_true",
                        default=False,
                        help="Attempt to download and verify package signature")
    args = parser.parse_args()
    if len(args.archives) % 2 != 0:
        parser.error(argparse.ArgumentTypeError(
            "-a/--archives requires an even number of arguments"))

    check_requirements(args.git)

    #
    # First, download the tarball, extract it and then do a set
    # of static analysis on the content of the tarball.
    #
    build.setup_patterns()
    tarball.name_and_version(args.url, args.name)
    tarball.download_tarball(args.url, args.name, args.archives, args.target)
    _dir = tarball.path

    if args.license_only:
        try:
            with open(os.path.join(build.download_path,
                                   tarball.name + ".license"), "r") as dotlic:
                for word in dotlic.read().split():
                    if ":" not in word:
                        license.add_license(word)
        except Exception:
            pass
        license.scan_for_licenses(tarball.name, _dir)
        exit(0)

    config.config_file = args.config
    config.parse_config_files(build.download_path, args.bump)
    config.parse_existing_spec(build.download_path, tarball.name)

    buildreq.set_build_req()
    buildreq.scan_for_configure(tarball.name, _dir, build.download_path)
    specdescription.scan_for_description(tarball.name, _dir)
    license.scan_for_licenses(tarball.name, _dir)
    docs.scan_for_changes(build.download_path, _dir)
    add_sources(build.download_path, args.archives)
    test.scan_for_tests(_dir)

    #
    # Now, we have enough to write out a specfile, and try to build it.
    # We will then analyze the build result and learn information until the
    # package builds
    #
    specfile = specfiles.Specfile(tarball.url, tarball.version, tarball.name,
                                  tarball.release)
    load_specfile(specfile)
    specfile.write_spec(build.download_path)

    print("\n")
    if args.integrity:
        pkg_integrity.check(args.url, build.download_path)

    while True:
        build.package()
        specfile.packages = files.packages
        specfile.locales = lang.locales
        specfile.write_spec(build.download_path)
        files.newfiles_printed = 0
        if build.round > 20 or build.must_restart == 0:
            break

    test.check_regression(build.download_path)

    if build.success == 0:
        print("Build failed")
        return
    elif os.path.isfile("README.clear"):
        try:
            print("\nREADME.clear CONTENTS")
            print("*********************")
            with open("README.clear", "r") as readme_f:
                print(readme_f.read())
            print("*********************\n")
        except Exception:
            pass

    examine_abi(build.download_path)
    with open(build.download_path + "/release", "w") as fp:
        fp.write(tarball.release + "\n")

    commitmessage.guess_commit_message()

    if args.git:
        git.commit_to_git(build.download_path)
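# Example of the -a/--archives pairing both parsers above enforce: arguments
# come in (URL, extraction-directory) pairs, which is why an odd count is
# rejected. Hypothetical invocation for illustration:
#   autospec.py http://example.com/downloads/mytar.tar.gz \
#       -a http://example.com/downloads/dependency.tar.gz dir/to/extract/to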