def parse_catkin_deps(cmakelists_file):
    """Determine requirements for catkin packages.

    Scans a CMakeLists.txt for a catkin find_package() invocation and adds
    pkgconfig build requirements for each listed COMPONENT.  If catkin is
    used at all, also adds the fixed catkin toolchain requirements and the
    extra cmake flags catkin needs.
    """
    pat = re.compile(r"^find_package.*\(.*(catkin)(?: REQUIRED *)?(?:COMPONENTS (?P<comp>.*))?\)$")
    catkin = False
    # Fix: the original opened the file without ever closing it; a context
    # manager guarantees the handle is released even on error.
    with util.open_auto(cmakelists_file, "r") as f:
        lines = f.readlines()
    for line in lines:
        match = pat.search(line)
        if not match:
            continue
        # include catkin's required components
        comp = match.group("comp")
        if comp:
            for curr in comp.split(" "):
                add_pkgconfig_buildreq(curr)
        catkin = True

    # catkin find_package() function will always rely on CMAKE_PREFIX_PATH
    # make sure we keep it consistent with CMAKE_INSTALL_PREFIX otherwise
    # it'll never be able to find its modules
    if catkin:
        for curr in ["catkin", "catkin_pkg", "empy", "googletest"]:
            add_buildreq(curr)
        extra_cmake.add("-DCMAKE_PREFIX_PATH=/usr")
        extra_cmake.add("-DCATKIN_BUILD_BINARY_PACKAGE=ON")
        extra_cmake.add("-DSETUPTOOLS_DEB_LAYOUT=OFF")
def description_from_readme(readmefile):
    """Harvest the opening paragraph or two of a readme as the description."""
    try:
        with util.open_auto(readmefile, "r") as fd:
            content = fd.readlines()
    except FileNotFoundError:
        return

    in_paragraph = False
    collected = ""
    for text in content:
        # A blank line ends collection once the description is long enough.
        if in_paragraph and len(text) < 2 and len(collected) > 80:
            break
        # The first non-blank line starts the paragraph of interest.
        if not in_paragraph and len(text) > 2:
            in_paragraph = True
        if in_paragraph:
            # Keep only substantive, non-skipped lines.
            if skipline(text) == 0 and len(text) > 2:
                collected += text.strip() + "\n"

    # A file literally named "readme" earns a slightly higher score.
    weight = 1.5 if readmefile.lower().endswith("readme") else 1
    assign_description(collected, weight)
def parse_build_results(filename, returncode, filemanager, config, requirements, content):
    """Handle build log contents.

    Scans the build log line by line, feeding each line through the
    configured pkgconfig/simple/failed pattern matchers, collecting
    unpackaged files for the %files section, and flagging overall build
    success or the need for a restart via the module globals.
    """
    global must_restart
    global success
    requirements.verbose = 1
    must_restart = 0
    infiles = 0

    # Flush the build-log to disk, before reading it
    util.call("sync")
    with util.open_auto(filename, "r") as buildlog:
        loglines = buildlog.readlines()
    for line in loglines:
        # Run every configured pattern matcher over each log line.
        for pat in config.pkgconfig_pats:
            simple_pattern_pkgconfig(line, *pat, config.config_opts.get('32bit'), requirements)
        for pat in config.simple_pats:
            simple_pattern(line, *pat, requirements)
        for pat in config.failed_pats:
            failed_pattern(line, config, requirements, *pat)
        check_for_warning_pattern(line)
        # Search for files to add to the %files section.
        # * infiles == 0 before we reach the files listing
        # * infiles == 1 for the "Installed (but unpackaged) file(s) found" header
        #   and for the entirety of the files listing
        # * infiles == 2 after the files listing has ended
        if infiles == 1:
            for search in ["RPM build errors", "Childreturncodewas", "Child returncode", "Empty %files file"]:
                if search in line:
                    infiles = 2
            for start in ["Building", "Child return code was"]:
                if line.startswith(start):
                    infiles = 2
        if infiles == 0 and "Installed (but unpackaged) file(s) found:" in line:
            infiles = 1
        elif infiles == 1 and "not matching the package arch" not in line:
            # exclude blank lines from consideration...
            file = line.strip()
            if file and file[0] == "/":
                filemanager.push_file(file, content.name)
        if line.startswith("Sorry: TabError: inconsistent use of tabs and spaces in indentation"):
            print(line)
            # 99 marks a tab/space indentation failure for the caller.
            returncode = 99
        nvr = f"{content.name}-{content.version}-{content.release}"
        match = f"File not found: /builddir/build/BUILDROOT/{nvr}.x86_64/"
        if match in line:
            # A file listed in %files no longer exists; drop it.
            missing_file = "/" + line.split(match)[1].strip()
            filemanager.remove_file(missing_file)
        if line.startswith("Executing(%clean") and returncode == 0:
            print("RPM build successful")
            success = 1
def save_system_pgo(self, mock_dir, content_name, config):
    """Copy chroot profiles to system pgo.

    Archives /var/tmp/pgo (if present and non-empty) into pgo.tar.gz in
    the package download path, then makes sure .gitignore whitelists the
    archive.  Exits fatally if the archive step fails.
    """
    root_dir_src = "/"
    system_pgo_dir_src = "/var/tmp/pgo"
    system_pgo_dir_dst = f"{config.download_path}/pgo.tar.gz"
    system_gitignore = f"{config.download_path}/.gitignore"
    tar_cmd = f"tar --directory={root_dir_src} --create --file=- var/tmp/pgo/ | pigz -9 -p 20 > {system_pgo_dir_dst}"
    if os.path.isdir(system_pgo_dir_src):
        if any(os.scandir(system_pgo_dir_src)):
            if os.path.isfile(system_pgo_dir_dst):
                os.remove(system_pgo_dir_dst)
            try:
                # shell=True is required for the pipe into pigz; the command
                # is built only from trusted config values.
                # Fix: the result was bound to an unused `process` variable,
                # and universal_newlines=True duplicated text=True.
                subprocess.run(
                    tar_cmd,
                    check=True,
                    shell=True,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    text=True,
                )
            except subprocess.CalledProcessError as err:
                print_fatal(
                    f"Unable to archive {system_pgo_dir_src} in {system_pgo_dir_dst} from {tar_cmd}: {err}"
                )
                sys.exit(1)
            # Append a "!pgo.tar.gz" rule unless .gitignore already has one;
            # after the read loop the file pointer sits at EOF, so the write
            # appends.
            append_new_gitrule = True
            with util.open_auto(system_gitignore, "r+") as gitignore:
                for line in gitignore:
                    if "!pgo.tar.gz" in line:
                        append_new_gitrule = False
                        break
                if append_new_gitrule:
                    gitignore.write("!pgo.tar.gz\n")
def parse_existing_spec(path, name):
    """Determine the old version, old patch list, old keyid, and cves from old spec file.

    Reads <path>/<name>.spec (if it exists) and populates the module
    globals old_version, old_patches, old_keyid and cves.
    """
    global old_version
    global old_patches
    global old_keyid
    global cves
    spec = os.path.join(path, "{}.spec".format(name))
    if not os.path.exists(spec):
        return
    with open_auto(spec, "r") as inp:
        for line in inp.readlines():
            line = line.strip().replace("\r", "").replace("\n", "")
            if "Source0 file verified with key" in line:
                # Take the hex key id following "0x"; keep the old value
                # when no "0x" marker is present (find() returned -1).
                keyidx = line.find('0x') + 2
                old_keyid = line[keyidx:].split()[0] if keyidx > 2 else old_keyid
            if ":" not in line:
                continue
            spl = line.split(":")
            key = spl[0].lower().strip()
            # Re-join the remainder in case the value itself contains ":".
            value = ":".join(spl[1:]).strip()
            if key == "version":
                old_version = value
            elif key.startswith("patch"):
                old_patches.append(value.lower())

    # Ignore nopatch
    for patch in patches:
        patch = patch.lower()
        # A cve-*.patch present now but absent from the old spec is a
        # newly fixed CVE; record it in canonical upper-case form.
        if patch not in old_patches and patch.endswith(
                ".patch") and patch.startswith("cve-"):
            cves.append(patch.upper().split(".PATCH")[0])
def grab_python_requirements(descfile):
    """Add python requirements from requirements.txt file.

    Requirement files under demo/doc/example/test directories are ignored,
    and parsing stops at the first extras section ([test], [dev], ...).
    """
    # Requirements in demo/doc/test trees are not runtime requirements.
    skip_dirs = ("/demo/", "/doc/", "/docs/", "/example/", "/test/", "/tests/")
    if any(part in descfile for part in skip_dirs):
        return
    # Section headers that end the core-requirements region.
    stop_sections = {'[test]', '[testing]', '[dev]', '[doc]', '[docs]'}
    with util.open_auto(descfile, "r") as f:
        lines = f.readlines()

    for line in lines:
        # Fix: clean_python_req(line) was previously re-invoked up to six
        # times for each line; compute it once per line.
        req = clean_python_req(line)
        # don't add the test section
        if req in stop_sections:
            break
        if 'pytest' in line:
            continue
        if req == 'mock':
            continue
        add_requires(req)
def parse_build_results(filename, returncode, filemanager):
    """Handle build log contents.

    Older variant that reads pattern lists and package metadata from the
    module-level config/buildreq/tarball objects instead of parameters.
    """
    global must_restart
    global success
    buildreq.verbose = 1
    must_restart = 0
    infiles = 0

    # Flush the build-log to disk, before reading it
    util.call("sync")
    with util.open_auto(filename, "r") as buildlog:
        loglines = buildlog.readlines()
    for line in loglines:
        # Run every configured pattern matcher over each log line.
        for pat in config.pkgconfig_pats:
            simple_pattern_pkgconfig(line, *pat)
        for pat in config.simple_pats:
            simple_pattern(line, *pat)
        for pat in config.failed_pats:
            failed_pattern(line, *pat)
        check_for_warning_pattern(line)
        # search for files to add to the %files section
        # track with infiles. If infiles == 1 we found the header
        # "Installed (but unpackaged) file(s) found" in the build log
        # This tells us to look in the next line. Increment infiles if we don't
        # find a file in the next line.
        if infiles == 1:
            for search in ["RPM build errors", "Childreturncodewas", "Child returncode", "Empty %files file"]:
                if search in line:
                    infiles = 2
            for start in ["Building", "Child return code was"]:
                if line.startswith(start):
                    infiles = 2
        if "Installed (but unpackaged) file(s) found:" in line:
            infiles = 1
        elif infiles == 1 and "not matching the package arch" not in line:
            # exclude blank lines from consideration...
            file = line.strip()
            if file:
                filemanager.push_file(file)
        if line.startswith("Sorry: TabError: inconsistent use of tabs and spaces in indentation"):
            print(line)
            # 99 marks a tab/space indentation failure for the caller.
            returncode = 99
        if "File not found: /builddir/build/BUILDROOT/" in line:
            # A file listed in %files no longer exists; drop it.
            left = "File not found: /builddir/build/BUILDROOT/%s-%s-%s.x86_64/" % (tarball.name, tarball.version, tarball.release)
            missing_file = "/" + line.split(left)[1].strip()
            filemanager.remove_file(missing_file)
        if line.startswith("Executing(%clean") and returncode == 0:
            print("RPM build successful")
            success = 1
def parse_existing_spec(self, path, name):
    """Determine the old version, old patch list, old keyid, and cves from old spec file.

    Prefers the Version/Patch* header fields from the latest git commit of
    the spec (via `git grep HEAD`), falling back to the working copy when
    no git history is available.  Populates self.old_version,
    self.old_patches, self.old_keyid and self.cves.
    """
    spec = os.path.join(path, "{}.spec".format(name))
    if not os.path.exists(spec):
        return
    found_old_version = False
    found_old_patches = False
    ver_regex = r"^Version *: *(.*) *$"
    patch_regex = r"^Patch[0-9]* *: *(.*) *$"
    # If git history exists, read the Version and Patch* spec header fields
    # from the latest commit to take priority over the working copy.
    cmd = ["git", "-C", path, "grep", "-E", "-h", ver_regex, "HEAD", spec]
    result = subprocess.run(cmd, capture_output=True)
    if result.returncode == 0:
        # The first matching line is from the spec header (hopefully)
        line = result.stdout.decode().split("\n")[0]
        m = re.search(ver_regex, line)
        if m:
            self.old_version = m.group(1)
            found_old_version = True
    cmd = [
        "git", "-C", path, "grep", "-E", "-h", patch_regex, "HEAD", spec
    ]
    result = subprocess.run(cmd, capture_output=True)
    if result.returncode == 0:
        lines = result.stdout.decode().split("\n")
        for line in lines:
            m = re.search(patch_regex, line)
            if m:
                self.old_patches.append(m.group(1).lower())
                found_old_patches = True
    with open_auto(spec, "r") as inp:
        for line in inp.readlines():
            line = line.strip().replace("\r", "").replace("\n", "")
            if "Source0 file verified with key" in line:
                # Take the hex key id following "0x"; keep the old value
                # when no "0x" marker is present (find() returned -1).
                keyidx = line.find('0x') + 2
                self.old_keyid = line[keyidx:].split()[0] if keyidx > 2 else self.old_keyid
            # As a fallback, read the Version and Patch* header fields from the
            # working copy of the spec, in case a git repo does not exist.
            m = re.search(ver_regex, line)
            if m and not found_old_version:
                self.old_version = m.group(1)
                found_old_version = True
            m = re.search(patch_regex, line)
            if m and not found_old_patches:
                self.old_patches.append(m.group(1).lower())

    # Ignore nopatch
    for patch in self.patches:
        patch = patch.lower()
        # A cve-*.patch present now but absent from the old spec is a
        # newly fixed CVE; record it in canonical upper-case form.
        if patch not in self.old_patches and patch.endswith(
                ".patch") and patch.startswith("cve-"):
            self.cves.append(patch.upper().split(".PATCH")[0])
def get_python_build_version_from_classifier(filename):
    """Detect if setup should use distutils3 only.

    Uses "Programming Language :: Python :: [2,3] :: Only" classifiers in
    the setup.py file.  Defaults to distutils3 if no such classifiers are
    found.
    """
    # Fix: both the "3 :: Only" branch and the fallback returned
    # "distutils3", so the classifier scan was dead code.  The read is kept
    # so a missing/unreadable setup file still raises as before.
    with util.open_auto(filename) as setup_file:
        setup_file.read()
    return "distutils3"
def summary_from_R(pkgfile):
    """Parse DESCRIPTION file for Title: lines."""
    try:
        with util.open_auto(pkgfile, "r") as fd:
            content = fd.readlines()
    except FileNotFoundError:
        return
    for text in content:
        if not text.startswith("Title:"):
            continue
        # Strip the "Title: " prefix and score the summary at 3.
        assign_summary(text[7:], 3)
        # Score will not increase, stop trying
        break
def grab_python_requirements(self, descfile, packages):
    """Add python requirements from requirements.txt file."""
    with util.open_auto(descfile, "r") as reqfile:
        reqlines = reqfile.readlines()
    for entry in reqlines:
        # An extras section header ("[...]") ends the core dependencies.
        if '[' in entry:
            break
        cleaned = clean_python_req(entry)
        if 'pytest' in entry:
            continue
        if not cleaned:
            continue
        name = f"pypi({cleaned})"
        # Only add a runtime requirement when the buildreq was new.
        if self.add_buildreq(name):
            self.add_requires(name, packages, override=True, subpkg="python3")
def summary_from_pkgconfig(pkgfile, package):
    """Parse pkgconfig files for Description: lines."""
    try:
        with util.open_auto(pkgfile, "r") as fd:
            content = fd.readlines()
    except FileNotFoundError:
        return
    # A .pc file named exactly after the package outranks any other .pc.
    weight = 3 if package + ".pc" in pkgfile else 2
    for text in content:
        if not text.startswith("Description:"):
            continue
        # Strip the "Description: " prefix before assigning.
        assign_summary(text[13:], weight)
        # Score will not increase, stop trying
        break
def rakefile(self, filename, gems):
    """Scan Rakefile for build requirements."""
    require_re = re.compile(r"^require '(.*)'$")
    with util.open_auto(filename, "r") as rake:
        content = rake.readlines()
    for text in content:
        found = require_re.search(text)
        if not found:
            continue
        name = found.group(1)
        # Known gems become buildreqs; anything else is just reported.
        if name != "rubygems" and name in gems:
            print("Rakefile-dep: " + gems[name])
            self.add_buildreq(gems[name])
        else:
            print("Rakefile-new: rubygem-" + name)
def parse_buildroot_log(self, filename, returncode):
    """Handle buildroot log contents."""
    # A successful build needs no buildroot-log inspection.
    if returncode == 0:
        return True
    self.must_restart = 0
    clean = True
    # Flush pending writes so the log is complete on disk.
    util.call("sync")
    unresolved = re.compile(r"^.*No matching package to install: '(.*)'$")
    with util.open_auto(filename, "r") as rootlog:
        for entry in rootlog.readlines():
            hit = unresolved.match(entry)
            if hit is not None:
                util.print_fatal("Cannot resolve dependency name: {}".format(hit.group(1)))
                clean = False
    return clean
def parse_cargo_toml(filename):
    """Update build requirements using Cargo.toml.

    Set the build requirements for building rust programs using cargo.
    """
    global cargo_bin
    buildpattern.set_build_pattern("cargo", 1)
    add_buildreq("rustc")
    with util.open_auto(filename, "r") as ctoml:
        cargo = toml.loads(ctoml.read())
    # A [[bin]] table or a src/main.rs next to Cargo.toml means this crate
    # produces a binary.
    if cargo.get("bin") or os.path.exists(os.path.join(os.path.dirname(filename), "src/main.rs")):
        cargo_bin = True
    deps = cargo.get("dependencies")
    if not deps:
        return
    for dep in deps:
        # A newly added buildreq also becomes a runtime requirement.
        if add_buildreq(dep):
            add_requires(dep)
def parse_r_description(self, filename, packages):
    """Update build/runtime requirements according to the R package description."""
    with util.open_auto(filename, "r") as desc:
        content = desc.read()
    # Gather dependencies from all three dependency-bearing fields.
    wanted = _get_desc_field("Depends", content)
    wanted.extend(_get_desc_field("Imports", content))
    wanted.extend(_get_desc_field("LinkingTo", content))
    provided = _get_r_provides()
    for dep in wanted:
        # R itself and anything the R install already provides need no package.
        if dep == "R":
            continue
        if dep in provided:
            continue
        rpkg = "R-" + dep
        self.add_buildreq(rpkg)
        self.add_requires(rpkg, packages)
def qmake_profile(filename):
    """Scan .pro file for build requirements.

    Maps QT/QT_PRIVATE/QT_FOR_CONFIG module names to pkgconfig build
    requirements via config.qt_modules.
    """
    pat = re.compile(r"(QT|QT_PRIVATE|QT_FOR_CONFIG).*=\s*(.*)\s*")
    with util.open_auto(filename, "r") as f:
        lines = f.readlines()
    for line in lines:
        match = pat.search(line)
        if not match:
            continue
        s = match.group(2)
        for module in s.split():
            module = re.sub('-private$', '', module)
            # Fix: the broad "except Exception: pass" also swallowed errors
            # from add_buildreq; only the qt_modules lookup is expected to
            # fail, and only with KeyError for unknown modules.
            try:
                pc = config.qt_modules[module]
            except KeyError:
                continue
            add_buildreq('pkgconfig({})'.format(pc))
def qmake_profile(self, filename, qt_modules, cache=False):
    """Scan .pro file for build requirements.

    Maps QT/QT_PRIVATE/QT_FOR_CONFIG module names to pkgconfig build
    requirements via the supplied qt_modules mapping.
    """
    pat = re.compile(r"(QT|QT_PRIVATE|QT_FOR_CONFIG).*=\s*(.*)\s*")
    with util.open_auto(filename, "r") as f:
        lines = f.readlines()
    for line in lines:
        match = pat.search(line)
        if not match:
            continue
        s = match.group(2)
        for module in s.split():
            module = re.sub("-private$", "", module)
            # Fix: the broad "except Exception: pass" also swallowed errors
            # from add_buildreq; only the qt_modules lookup is expected to
            # fail, and only with KeyError for unknown modules.
            try:
                pc = qt_modules[module]
            except KeyError:
                continue
            self.add_buildreq("pkgconfig({})".format(pc), cache=cache)
def parse_r_description(filename):
    """Update build/runtime requirements according to the R package description."""
    with util.open_auto(filename, "r") as desc:
        content = desc.read()
    # Gather dependencies from all three dependency-bearing fields.
    wanted = _get_desc_field("Depends", content)
    wanted.extend(_get_desc_field("Imports", content))
    wanted.extend(_get_desc_field("LinkingTo", content))
    provided = _get_r_provides()
    for dep in wanted:
        # R itself and anything the R install already provides need no package.
        if dep == 'R':
            continue
        if dep in provided:
            continue
        rpkg = 'R-' + dep
        # Only packages that exist in the distro can be required.
        if rpkg not in config.os_packages:
            print("CRAN package '{}' not found in os_packages, skipping".format(rpkg))
            continue
        add_buildreq(rpkg)
        add_requires(rpkg)
def description_from_spec(specfile):
    """Parse any existing RPM specfiles.

    Extracts license words from License: lines, the summary from a
    Summary: line, and the %description section text (scored at 4).
    """
    try:
        with util.open_auto(specfile, 'r') as specfd:
            lines = specfd.readlines()
    except FileNotFoundError:
        return
    specdesc = ""
    section = False
    for line in lines:
        if line.startswith("#"):
            continue
        # Any new % directive ends the %description section.
        if line.startswith("%"):
            section = False
        excludes = ["Copyright", "see ", "("]
        if line.startswith("License:") and not any(e in line for e in excludes):
            splits = line.split(":")[1:]
            # Re-join in case the license text itself contains ":".
            words = ":".join(splits).strip()
            if words in config.license_translations:
                print("Adding license from spec:", words)
                license.add_license(words)
            else:
                # Unknown as a whole; try the individual cleaned words.
                words = clean_license_string(words).split()
                for word in words:
                    if ":" not in word and not word.startswith("@"):
                        print("Adding license from spec:", word)
                        license.add_license(word)
        if line.startswith("Summary: "):
            assign_summary(line[9:], 4)
        specdesc += line if section else ""
        # Check for %description after assigning the line to specdesc so the
        # %description string is not included
        if line.endswith("%description\n"):
            section = True
    if len(specdesc) > 10:
        assign_description(specdesc, 4)
def description_from_pkginfo(pkginfo):
    """Parse existing package info files.

    Extracts license words from a license: line, summaries from
    Summary:/abstract: lines, and the Description: section text
    (scored at 4).
    """
    try:
        with util.open_auto(pkginfo, 'r') as pkgfd:
            lines = pkgfd.readlines()
    except FileNotFoundError:
        return
    # Reuse the parameter name as the accumulated description text.
    pkginfo = ""
    section = False
    for line in lines:
        # A new "key: value" header ends the Description section.
        if ":" in line and section:
            section = False
        excludes = ["Copyright", "see "]
        if line.lower().startswith("license:") and not any(e in line for e in excludes):
            splits = line.split(":")[1:]
            # Re-join in case the license text itself contains ":".
            words = ":".join(splits).strip()
            if words in config.license_translations:
                print("Adding license from PKG-INFO:", words)
                license.add_license(words)
            else:
                # Unknown as a whole; try the individual cleaned words.
                words = clean_license_string(words).split()
                for word in words:
                    if ":" not in word:
                        print("Adding license from PKG-INFO:", word)
                        license.add_license(word)
        for sub in ["Summary: ", "abstract: "]:
            if line.startswith(sub):
                assign_summary(line[len(sub):].strip(), 4)
        pkginfo += line if section else ""
        # Check for Description: after accumulating so the header line
        # itself is not included.
        if line.startswith("Description:"):
            section = True
    if len(pkginfo) > 10:
        assign_description(pkginfo, 4)
def parse_cmake(self, filename, cmake_modules, conf32):
    """Scan a .cmake or CMakeLists.txt file for what's it's actually looking for.

    find_package() modules are mapped to buildreqs via cmake_modules;
    pkg_check_modules() arguments become pkgconfig buildreqs.
    """
    findpackage = re.compile(r"^[^#]*find_package\((\w+)\b.*\)", re.I)
    pkgconfig = re.compile(r"^[^#]*pkg_check_modules\s*\(\w+ (.*)\)", re.I)
    # Keywords of pkg_check_modules() that are not module names.
    pkg_search_modifiers = {
        'REQUIRED', 'QUIET', 'NO_CMAKE_PATH',
        'NO_CMAKE_ENVIRONMENT_PATH', 'IMPORTED_TARGET'
    }
    # Pull one word at a time: group 1 is a quoted word, group 2 an
    # unquoted word, group 3 the remainder of the argument string.
    extractword = re.compile(r'(?:"([^"]+)"|(\S+))(.*)')
    with util.open_auto(filename, "r") as f:
        lines = f.readlines()
    for line in lines:
        match = findpackage.search(line)
        if match:
            module = match.group(1)
            try:
                # Unknown modules raise KeyError and are skipped.
                pkg = cmake_modules[module]
                self.add_buildreq(pkg)
            except Exception:
                pass
        match = pkgconfig.search(line)
        if match:
            rest = match.group(1)
            while rest:
                wordmatch = extractword.search(rest)
                if not wordmatch:
                    break
                rest = wordmatch.group(3)
                if wordmatch.group(2) in pkg_search_modifiers:
                    continue
                # Only one of the two groups can match at a time
                module = wordmatch.group(1)
                if not module:
                    module = wordmatch.group(2)
                # We have a match, so strip out any version info
                for m in parse_modules_list(module, is_cmake=True):
                    self.add_pkgconfig_buildreq(m, conf32)
def parse_configure_ac(self, filename, config, cache=False):
    """Parse the configure.ac file for build requirements.

    Reads one character at a time, tracking parenthesis depth so that
    multi-line macro invocations are joined into one logical line before
    being handed to configure_ac_line().
    """
    buf = ""
    depth = 0
    # print("Configure parse: ", filename)
    config.set_build_pattern("configure_ac", 1)
    # Fix: a context manager guarantees the file is closed even if
    # configure_ac_line raises (the bare open/close did not).
    with util.open_auto(filename, "r") as f:
        while True:
            c = f.read(1)
            if not c:
                break
            if c == "(":
                depth += 1
            if c == ")" and depth > 0:
                depth -= 1
            if c != "\n":
                buf += c
            if c == "\n" and depth == 0:
                # A complete top-level logical line has been gathered.
                self.configure_ac_line(buf, config.config_opts.get("32bit"), cache=cache)
                buf = ""
    # Flush any trailing text that lacked a final newline.
    self.configure_ac_line(buf, config.config_opts.get("32bit"), cache=cache)
def parse_configure_ac(filename):
    """Parse the configure.ac file for build requirements.

    Reads one character at a time, tracking parenthesis depth so that
    multi-line macro invocations are joined into one logical line before
    being handed to configure_ac_line().
    """
    buf = ""
    depth = 0
    # print("Configure parse: ", filename)
    buildpattern.set_build_pattern("configure_ac", 1)
    # Fix: a context manager guarantees the file is closed even if
    # configure_ac_line raises (the bare open/close did not).
    with util.open_auto(filename, "r") as f:
        while True:
            c = f.read(1)
            if not c:
                break
            if c == "(":
                depth += 1
            if c == ")" and depth > 0:
                depth -= 1
            if c != "\n":
                buf += c
            if c == "\n" and depth == 0:
                # A complete top-level logical line has been gathered.
                configure_ac_line(buf)
                buf = ""
    # Flush any trailing text that lacked a final newline.
    configure_ac_line(buf)
def grab_python_requirements(self, descfile, packages, cache=False):
    """Add python requirements from requirements.txt file.

    Requirement files under demo/doc/example/test directories are ignored,
    and parsing stops at the first extras section ([test], [dev], ...).
    """
    # Requirements in demo/doc/test trees are not runtime requirements.
    skip_dirs = ("/demo/", "/doc/", "/docs/", "/example/", "/test/", "/tests/")
    if any(part in descfile for part in skip_dirs):
        return
    # Section headers that end the core-requirements region.
    stop_sections = {"[test]", "[testing]", "[dev]", "[doc]", "[docs]"}
    with util.open_auto(descfile, "r") as f:
        lines = f.readlines()

    for line in lines:
        # Fix: clean_python_req(line) was previously re-invoked up to seven
        # times for each line; compute it once per line.
        req = clean_python_req(line)
        # don't add the test section
        if req in stop_sections:
            break
        if "pytest" in line:
            continue
        if req == "mock":
            continue
        print(f"Adding additional (python) requirement: {req}")
        self.add_requires(req, packages, cache=cache)
def add_setup_py_requires(self, filename, packages, cache=False):
    """Detect build requirements listed in setup.py in the install_requires and setup_requires lists.

    Handles the following patterns:
    install_requires='one'
    install_requires=['one', 'two', 'three']
    install_requires=['one',
                      'two',
                      'three']
    setup_requires=[
        'one>=2.1',   # >=2.1 is removed
        'two',
        'three'
    ]
    setuptools.setup(
        setup_requires=['one', 'two'],
        ...)
    setuptools.setup(setup_requires=['one', 'two'], ...)

    Does not evaluate variables for security purposes
    """
    # multiline tracks whether we are inside a list that continues on
    # following lines; req records whether the current list is
    # install_requires (True -> also a runtime requirement).
    multiline = False
    req = ""
    with util.open_auto(filename) as f:
        lines = f.readlines()

    for line in lines:
        if not multiline and ("install_requires" in line or "setup_requires" in line):
            req = "install_requires" in line
            # find the value for *_requires
            line = line.split("=", 1)
            if len(line) == 2:
                line = line[1].strip()
            else:
                # skip because this could be a conditionally extended list
                # we only want to automatically detect the core packages
                continue

            # easy, one-line case
            if line.startswith("[") and "]" in line:
                # remove the leading [ and split off everthing after the ]
                line = line[1:].split("]")[0]
                for item in line.split(","):
                    item = item.strip()
                    try:
                        # eval the string and add requirements
                        dep = clean_python_req(ast.literal_eval(item), False)
                        print(f"Adding additional build (python setup) requirement: {dep}")
                        self.add_buildreq(dep, cache=cache)
                        if req:
                            print(f"Adding additional (python setup) requirement: {dep}")
                            self.add_requires(dep, packages, cache=cache)
                    except Exception:
                        # do not fail, the line contained a variable and
                        # had to be skipped
                        pass
                continue

            # more complicated, multi-line list.
            # this sets the py_dep_string with the current line, which
            # is the beginning of a multi-line list.
            elif line.startswith("["):
                multiline = True
                line = line.lstrip("[")

            # if the line doesn't start with '[' it is the case where
            # there is (should be) a single dependency as a string
            else:
                line = line.strip()
                try:
                    dep = clean_python_req(ast.literal_eval(line), False)
                    print(f"Adding additional build (python setup) requirement: {dep}")
                    self.add_buildreq(dep, cache=cache)
                    if req:
                        print(f"Adding additional (python setup) requirement: {dep}")
                        self.add_requires(dep, packages, cache=cache)
                except Exception:
                    # Do not fail, just keep looking
                    pass
                continue

        # if multiline was set above when a multi-line list was
        # detected, for each line until the end bracket is found attempt to
        # add the line as a buildreq
        if multiline:
            # if end bracket found, reset the flag
            if "]" in line:
                multiline = False
                line = line.split("]")[0]
            try:
                dep = ast.literal_eval(line.split("#")[0].strip(" ,\n"))
                dep = clean_python_req(dep)
                print(f"Adding additional build (python setup) requirement: {dep}")
                self.add_buildreq(dep, cache=cache)
                if req:
                    print(f"Adding additional (python setup) requirement: {dep}")
                    self.add_requires(dep, packages, cache=cache)
            except Exception:
                # do not fail, the line contained a variable and had to
                # be skipped
                pass
def add_pyproject_requires(self, filename):
    """Detect build requirements listed in pyproject.toml in the build-system's requires lists."""
    with util.open_auto(filename) as pfile:
        pyproject = toml.loads(pfile.read())
    # Nothing to do when the file declares no [build-system] table.
    # NOTE(review): `buildsys` is bound but unused in the visible span —
    # the remainder of this function (which presumably iterates
    # buildsys["requires"]) appears to be truncated in this view; confirm
    # against the full file.
    if not (buildsys := pyproject.get("build-system")):
        return
def parse_build_results(self, filename, returncode, filemanager, config, requirements, content):
    """Handle build log contents.

    Scans the build log line by line, feeding each line through the
    configured pattern matchers (unless short-circuited at prep/binary),
    collecting unpackaged files for the %files section, and recording
    success per short-circuit phase.  On a successful "build" phase with
    altflags_pgo_ext set, saves or copies PGO profiles.
    """
    requirements.verbose = 1
    self.must_restart = 0
    self.file_restart = 0
    infiles = 0
    patch_name = ""

    # Flush the build-log to disk, before reading it
    util.call("sync")
    with util.open_auto(filename, "r") as buildlog:
        loglines = buildlog.readlines()

    for line in loglines:
        #if patch_name_match := self.patch_name_line.search(line):
            #patch_name = patch_name_match.groups()[0]
        #if patch_name:
            #if self.patch_fail_line.search(line):
                #self.must_restart += config.remove_backport_patch(patch_name)
        # Pattern matching is skipped for prep/binary short circuits.
        if (self.short_circuit != "prep" and self.short_circuit != "binary"):
            for pat in config.pkgconfig_pats:
                self.simple_pattern_pkgconfig(line, *pat, config.config_opts.get('32bit'), requirements)
            for pat in config.simple_pats:
                self.simple_pattern(line, *pat, requirements)
            for pat in config.failed_pats:
                self.failed_pattern(line, config, requirements, *pat)
            for pat in config.failed_exit_pats:
                self.failed_exit_pattern(line, config, requirements, *pat)
        # check_for_warning_pattern(line)
        # Search for files to add to the %files section.
        # * infiles == 0 before we reach the files listing
        # * infiles == 1 for the "Installed (but unpackaged) file(s) found" header
        #   and for the entirety of the files listing
        # * infiles == 2 after the files listing has ended
        if infiles == 1:
            for search in [
                    "RPM build errors", "Childreturncodewas",
                    "Child returncode", "Empty %files file"
            ]:
                if search in line:
                    infiles = 2
            for start in ["Building", "Child return code was"]:
                if line.startswith(start):
                    infiles = 2
        if infiles == 0 and "Installed (but unpackaged) file(s) found:" in line:
            infiles = 1
            filemanager.fix_broken_pkg_config_versioning(content.name)
            if config.config_opts["altcargo1"] or config.config_opts["altcargo_pgo"]:
                filemanager.write_cargo_find_install_assets(content.name)
        # elif infiles == 1 and "not matching the package arch" not in line:
        elif infiles == 1:
            # exclude blank lines from consideration...
            file = line.strip()
            if file and file[0] == "/":
                filemanager.push_file(file, content.name)
                print(file)
        if line.startswith("Sorry: TabError: inconsistent use of tabs and spaces in indentation"):
            print(line)
            # 99 marks a tab/space indentation failure for the caller.
            returncode = 99
        match = f"File not found: /builddir/build/BUILDROOT/{content.name}-{content.version}-{content.release}.x86_64/"
        if match in line:
            # A file listed in %files no longer exists; drop it.
            missing_file = "/" + line.split(match)[1].strip()
            filemanager.remove_file(missing_file)
        if line.startswith("Executing(%clean") and returncode == 0:
            if self.short_circuit == "binary":
                print("RPM binary build successful")
                self.success = 1
            elif self.short_circuit is None:
                print("RPM build successful")
                self.success = 1
        if line.startswith("Child return code was: 0") and returncode == 0:
            if self.short_circuit == "prep":
                print("RPM short circuit prep build successful")
                self.success = 1
            elif self.short_circuit == "build":
                print("RPM build build successful")
                self.success = 1
            elif self.short_circuit == "install":
                print("RPM install build successful")
                self.success = 1

    # NOTE(review): placement of this block relative to the loop is
    # ambiguous in the mangled source; after the loop matches the
    # once-per-build semantics of saving/copying PGO data — confirm.
    if self.success == 1 and self.short_circuit == "build" and config.config_opts.get("altflags_pgo_ext"):
        if config.config_opts.get("altflags_pgo_ext_phase"):
            self.save_system_pgo(self.mock_dir, content.name, config)
        else:
            self.copy_to_system_pgo(self.mock_dir, content.name, config)
def parse_cargo_toml(self, filename, config):
    """Update build requirements using Cargo.toml.

    Set the build requirements for building rust programs using cargo.
    """
    config.set_build_pattern("cargo", 1)
    if config.default_pattern != "cargo":
        return
    # Fixed toolchain and support packages always required for cargo
    # builds, added in the same order as before.
    for req in (
        "asciidoctor", "asciidoctor-bin", "asciidoctor-dev", "binutils-dev",
        "binutils-extras", "buildreq-cmake", "buildreq-distutils3",
        "ca-certs", "ca-certs-static", "doxygen", "elfutils-dev", "git",
        "googletest-dev", "grep", "intltool", "intltool-dev", "libedit",
        "libedit-dev", "libffi-dev", "libffi-staticdev", "libxml2-dev",
        "libxml2-staticdev", "ncurses-dev", "openssl", "openssl-dev",
        "ruby", "rustc", "rustc-bin", "rustc-data", "rustc-dev",
        "rustc-staticdev", "termcolor", "time", "cargo-edit", "pandoc",
        "pandocfilters", "just", "llvm-staticdev", "llvm-man",
        "llvm-libexec", "llvm-lib", "llvm-dev", "llvm-data", "llvm-bin",
        "llvm", "gcc", "gcc-dev32", "gcc-dev", "gcc-doc", "gcc-go",
        "gcc-go-lib", "gcc-libgcc32", "gcc-libstdc++32", "gcc-libs-math",
        "gcc-libubsan", "gcc-locale", "libgcc1", "libstdc++", "glibc-bin",
        "glibc-dev32", "glibc-dev", "glibc-doc", "glibc-extras",
        "glibc-libc32", "glibc-lib-avx2", "glibc-locale", "glibc-nscd",
        "glibc-staticdev", "glibc-utils", "glibc",
    ):
        self.add_buildreq(req)
    with util.open_auto(filename, "r") as ctoml:
        cargo = toml.loads(ctoml.read())
    # A [[bin]] table or a src/main.rs next to Cargo.toml means this crate
    # produces a binary.
    if cargo.get("bin") or os.path.exists(os.path.join(os.path.dirname(filename), "src/main.rs")):
        self.cargo_bin = True
    if not cargo.get("dependencies"):
        return
    if not config.config_opts["altcargo1"]:
        for cdep in cargo["dependencies"]:
            # A newly added buildreq also becomes a runtime requirement.
            if self.add_buildreq(cdep):
                self.add_requires(cdep, config.os_packages)
def process_NEWS(newsfile, old_version):
    """Parse the newfile for relevent changes.

    Look for changes and CVE fixes relevant to current version update.
    This information is returned as a tuple: (commitmessage, cves).

    A maximum of 15 lines from the newsfile is returned in the
    commitmessage. If the newsfile information is truncated to 15 lines
    an additional line is added "(NEWS truncated at 15 lines)"
    """
    commitmessage = []
    cves = set()
    start = 0
    stop = 0
    success = False
    start_found = False
    if old_version is None or old_version == tarball.version:
        # no version update, so no information to search for in newsfile
        return commitmessage, cves
    try:
        with util.open_auto(os.path.join(build.download_path, newsfile)) as f:
            newslines = f.readlines()
    except EnvironmentError:
        return commitmessage, cves

    newslines = [news.rstrip('\n') for news in newslines]

    # escape some values for use in regular expressions below
    escaped_curver = re.escape(tarball.version)
    escaped_oldver = re.escape(old_version)
    escaped_tarname = re.escape(tarball.name)

    # these are patterns that define the beginning of a block of information
    # regarding the current version.
    news_start = [
        r'Version.*{}'.format(escaped_curver),
        r'(v|- )?{}:?'.format(escaped_curver),
        r'{}-{}:?'.format(escaped_tarname, escaped_curver),
        r'{} 20'.format(escaped_curver)
    ]
    # these are patterns that define the end of a block of information
    # regarding the current version.
    news_end = [
        r'\*\*\* Changes in.*{}'.format(escaped_oldver),
        r'{}.*201'.format(escaped_oldver),
        r'Version.*{}'.format(escaped_oldver),
        r'^Overview of changes leading to {}'.format(escaped_oldver),
        r'^{}(-| ){}:?'.format(escaped_tarname, escaped_oldver),
        r'v?{}:?'.format(escaped_oldver)
    ]

    for idx, news in enumerate(newslines):
        # only check headers for begin and end patterns
        if is_header(newslines, idx):
            for pat in news_start:
                if find_in_line(pat, news):
                    start = idx
                    start_found = True
                    break
            if start_found:
                for pat in news_end:
                    if find_in_line(pat, news):
                        success = True
                        # stop before this header
                        stop = idx - 1
                        break
        if start_found and success:
            break

    if not success or stop <= start:
        return commitmessage, cves

    # now search for CVEs
    pat = re.compile(r"(CVE\-[0-9]+\-[0-9]+)")
    for news in newslines[start:stop]:
        match = pat.search(news)
        if match:
            s = match.group(1)
            cves.add(s)

    # compile commitmessage to return
    commitmessage.append("")
    for news in newslines[start:min(start + 15, stop)]:
        commitmessage.append(news)

    if stop > start + 15:
        # append message that news was truncated
        commitmessage.extend(["", "(NEWS truncated at 15 lines)"])

    commitmessage.append("")
    return commitmessage, cves