def _load_installed_packages(self):
    """Visit the rpmdb and feed data for each installed package to
    self._on_package_data().

    If self._rpm_dbpath is set, the rpm '_dbpath' macro is overridden
    for the duration of the scan.  The override is removed in a
    ``finally`` clause (previously delMacro ran unconditionally -- even
    when addMacro had not been called -- and was skipped entirely if
    iteration raised).
    """
    log.info("reading installed packages.")
    override_dbpath = self._rpm_dbpath is not None
    if override_dbpath:
        rpm.addMacro("_dbpath", self._rpm_dbpath)
    try:
        for pkg in rpm.ts().dbMatch():
            # TODO Load capabilities information in the same manner
            # Media.list_medias() will return.
            pkgdict = {}
            for attr in (
                "name",
                "version",
                "release",
                "arch",
                "epoch",
                "size",
                "group",
                "summary",
                "installtime",
                "disttag",
                "distepoch",
            ):
                value = pkg[attr]
                # Some header tags come back as empty lists; normalise
                # those to an empty string.
                if isinstance(value, list) and not value:
                    value = ""
                pkgdict[attr] = value
            # installtime may be a list of timestamps; keep the first.
            if isinstance(pkg["installtime"], list):
                pkgdict["installtime"] = pkg["installtime"][0]
            # A missing epoch is treated as epoch 0.
            if pkgdict["epoch"] is None:
                pkgdict["epoch"] = 0
            self._on_package_data(pkgdict)
    finally:
        # Only delete the macro if we actually defined it above.
        if override_dbpath:
            rpm.delMacro("_dbpath")
def testNetSharedPath(self):
    """Installing with the rpm '_netsharedpath' macro set must skip
    files under the listed paths while still installing the rest, and
    must leave pre-existing unmanaged files in excluded paths alone.
    """
    # CNY-3503
    p = self.addRPMComponent('netshared:rpm=1.0-1',
                             'netshared-1.0-1.noarch.rpm')
    util.mkdirChain(self.cfg.root + '/etc/rpm/')
    #self.updatePkg('netshared:rpm=1.0-1')
    #self.verifyFile(self.rootDir + '/local/shouldexist')
    ##self.verifyFile(self.rootDir + '/excluded/shouldnotexist')
    self.resetRoot()
    util.mkdirChain(self.cfg.root + '/etc/rpm/')
    util.mkdirChain(self.cfg.root + '/excluded')
    # Pre-seed an unmanaged file inside the excluded path; the update
    # below must not clobber it.
    self.writeFile(self.cfg.root + '/excluded/shouldnotexist', 'unmanaged')
    # inside the method to let @conary_test.rpm keep the import conditional
    import rpm
    rpm.addMacro('_netsharedpath', '/excluded:/usr/local')
    try:
        self.updatePkg('netshared:rpm=1.0-1')
    finally:
        rpm.delMacro('_netsharedpath')
    # /local is not in _netsharedpath, so its file must be installed;
    # /excluded is, so the unmanaged file must survive untouched.
    self.verifyFile(self.rootDir + '/local/shouldexist')
    self.verifyFile(self.rootDir + '/excluded/shouldnotexist', 'unmanaged')
    self.erasePkg(self.rootDir, 'netshared:rpm')
def run(self):
    """Resolve build dependencies for every requested package spec.

    User-supplied macro definitions (``self.opts.define``) are pushed
    for the duration of the run and always popped again in a
    ``finally`` clause, so an unexpected exception can no longer leave
    them defined for future rpm calls.

    Raises dnf.exceptions.Error if any package could not be found.
    """
    # Push user-supplied macro definitions for spec parsing
    for macro in self.opts.define:
        rpm.addMacro(macro[0], macro[1])
    pkg_errors = False
    try:
        for pkgspec in self.opts.packages:
            try:
                if self.opts.srpm:
                    self._src_deps(pkgspec)
                elif self.opts.spec:
                    self._spec_deps(pkgspec)
                elif pkgspec.endswith('.src.rpm') or \
                        pkgspec.endswith('nosrc.rpm'):
                    self._src_deps(pkgspec)
                elif pkgspec.endswith('.spec'):
                    self._spec_deps(pkgspec)
                else:
                    self._remote_deps(pkgspec)
            except dnf.exceptions.Error as e:
                # Report and continue; remember the failure for the end.
                logger.error(e)
                pkg_errors = True
    finally:
        # Pop user macros so they don't affect future rpm calls
        for macro in self.opts.define:
            rpm.delMacro(macro[0])
    if pkg_errors:
        raise dnf.exceptions.Error(_("Some packages could not be found."))
def rpm_macros(**keys):
    """Context-manager body: define every keyword argument as an rpm
    macro for the duration of the with-block, then remove them all.

    Removal now happens in a ``finally`` clause so macros no longer
    leak when the with-block raises.  (Python 2 code: keeps
    ``iteritems``.)
    """
    for key, value in keys.iteritems():
        log.debug('setting...')
        # Lazy %-style args avoid building the message when DEBUG is off.
        log.debug('%s %s', key, value)
        rpm.addMacro(key, value)
    try:
        yield
    finally:
        for key in keys:
            rpm.delMacro(key)
def update_macros():
    '''
    Update build macros from mock target configuration.
    '''
    # NOTE(review): 'self' and 'flags' are free names here -- this
    # function appears to rely on an enclosing scope; confirm.
    for name in ('%dist', '%rhel', '%fedora', '%_build_arch', '%_arch'):
        value = Mock.get_macro(name, self, flags)
        # An expansion still starting with '%' means the macro is not
        # defined in the mock config; keep the current definition.
        if value.startswith('%'):
            continue
        rpm.delMacro(name[1:])
        rpm.addMacro(name[1:], value)
def rpm_macros(macros):
    """Context manager to add and remove all macros in the dictionary.

    Macros are removed in reverse insertion order inside a ``finally``
    clause, so they are popped even when the managed block raises.
    The keys are materialised into a list before ``reversed()`` because
    ``reversed()`` on a dict view only works on Python 3.8+.
    """
    if macros is None:
        macros = OrderedDict()
    for key, value in macros.items():
        rpm.addMacro(key, value)
    try:
        yield
    finally:
        for key in reversed(list(macros)):
            rpm.delMacro(key)
def readkeys(self, keys=()):
    """Import each key in *keys* into the temporary rpm database.

    Individual KeyErrors from readkey() are printed and skipped.

    Raises KeyError if no key at all could be imported.

    Fixes: the default argument was a mutable list, and the final
    ``raise`` skipped the trailing delMacro, leaking the '_dbpath'
    override.  Cleanup now runs in a ``finally`` clause.
    """
    rpm.addMacro('_dbpath', self.dbdir)
    try:
        for key in keys:
            try:
                self.readkey(key)
            except KeyError as e:
                print(e)
        if not len(self.imported):
            raise KeyError('', "no key imported")
    finally:
        rpm.delMacro("_dbpath")
def rpm_macros(*macros):
    """
    Context manager to add and remove stacked RPM macro 'environments'.

    Macro definitions which occur later in 'macros' override definitions
    made earlier.  Cleanup now runs in a ``finally`` clause so the macro
    stack is restored even when the managed block raises.
    """
    for macro in macros:
        for key, value in macro.items():
            rpm.addMacro(key, value)
    try:
        yield
    finally:
        # Pop in reverse order to unwind the stacked definitions.
        for macro in reversed(macros):
            for key in macro:
                rpm.delMacro(key)
def srpmNameFromSpec( spec ):
    """Return the file name of the source package which building *spec*
    will produce.

    The NAME/VERSION/RELEASE/ARCH macros are defined only for the
    expansion and are now removed in a ``finally`` clause, so a failure
    while expanding the file-name pattern can no longer leak them.
    """
    h = spec.sourceHeader
    if buildType() == "rpm":
        rpm.addMacro( 'NAME', h['name'] )
    else:
        rpm.addMacro( 'NAME', mappkgname.map_package(h['name'])[0] )
    rpm.addMacro( 'VERSION', h['version'] )
    rpm.addMacro( 'RELEASE', h['release'] )
    rpm.addMacro( 'ARCH', 'src' )

    # There doesn't seem to be a macro for the name of the source
    # rpm, but the name appears to be the same as the rpm name format.
    # Unfortunately expanding that macro gives us a leading 'src' that we
    # don't want, so we strip that off
    try:
        if buildType() == "rpm":
            srpmname = os.path.basename( rpm.expandMacro( rpmfilenamepat ) )
        else:
            srpmname = os.path.basename(
                rpm.expandMacro( "%{NAME}_%{VERSION}-%{RELEASE}.dsc" ) )
    finally:
        rpm.delMacro( 'NAME' )
        rpm.delMacro( 'VERSION' )
        rpm.delMacro( 'RELEASE' )
        rpm.delMacro( 'ARCH' )

    # HACK: rewrite %dist if it appears in the filename
    return srpmname.replace( chroot_dist, host_dist )
def files_from_pkg(basename, pkg, specpath):
    # should be able to build this from the files sections - can't find how
    # to get at them from the spec object
    """Return debhelper install-file content ("src dst" per line) for
    the files of subpackage *pkg* listed in the spec at *specpath*.

    Each file name is macro-expanded twice: once with *relative*
    _libdir/_bindir (deb wants relative source paths) and once with
    absolute prefixes for the destination directory.  The macro
    overrides are now removed in ``finally`` clauses so a failed
    expansion cannot leak them.
    """
    res = ""
    files = rpmextra.files_from_spec(basename, specpath)
    for filename in files.get(pkg.header['name'], []):
        # Debian packages must not contain compiled Python files.
        # Instead, the python2 helper arranges to compile these
        # files when they are installed.
        if os.path.splitext(filename)[1] in [".pyc", ".pyo"]:
            continue

        # deb just wants relative paths
        rpm.addMacro("_libdir", "usr/lib")
        rpm.addMacro("_bindir", "usr/bin")
        try:
            src = rpm.expandMacro(filename).lstrip("/")
        finally:
            rpm.delMacro("_bindir")
            rpm.delMacro("_libdir")

        rpm.addMacro("_libdir", "/usr/lib")
        rpm.addMacro("_bindir", "/usr/bin")
        try:
            dst = rpm.expandMacro(filename)
        finally:
            rpm.delMacro("_bindir")
            rpm.delMacro("_libdir")

        # destination paths should be directories, not files.
        # if the file is foo and the path is /usr/bin/foo, the
        # package will end up install /usr/bin/foo/foo
        if not dst.endswith("/"):
            dst = os.path.dirname(dst)

        res += "%s %s\n" % (src, dst)
    return res
def srpm_name_from_spec(spec):
    """Return the file name of the source package which building *spec*
    will produce.

    The NAME/VERSION/RELEASE/ARCH macros are defined only for the
    expansion and are now removed in a ``finally`` clause, so a failure
    while expanding the pattern can no longer leak them.
    """
    hdr = spec.sourceHeader
    rpm.addMacro('NAME', map_package_name(hdr['name'])[0])
    rpm.addMacro('VERSION', hdr['version'])
    rpm.addMacro('RELEASE', hdr['release'])
    rpm.addMacro('ARCH', 'src')

    # There doesn't seem to be a macro for the name of the source
    # rpm, but the name appears to be the same as the rpm name format.
    # Unfortunately expanding that macro gives us a leading 'src' that we
    # don't want, so we strip that off
    try:
        if build_type() == "rpm":
            srpmname = os.path.basename(rpm.expandMacro(RPMFILENAMEPAT))
        else:
            srpmname = os.path.basename(
                rpm.expandMacro("%{NAME}_%{VERSION}-%{RELEASE}.dsc"))
    finally:
        rpm.delMacro('NAME')
        rpm.delMacro('VERSION')
        rpm.delMacro('RELEASE')
        rpm.delMacro('ARCH')

    # HACK: rewrite %dist if it appears in the filename
    return srpmname.replace(CHROOT_DIST, HOST_DIST)
def rpm_macros(*macros):
    """
    Context manager to add and remove stacked RPM macro 'environments'.

    Macro definitions which occur later in 'macros' override definitions
    made earlier.  Byte-string values are decoded to str before being
    handed to rpm.  Cleanup now runs in a ``finally`` clause so the
    macro stack is restored even when the managed block raises.
    """
    for macro in macros:
        for key, value in macro.items():
            if isinstance(value, bytes):
                value = value.decode()
            rpm.addMacro(key, value)
    try:
        yield
    finally:
        # Pop in reverse order to unwind the stacked definitions.
        for macro in reversed(macros):
            for key in macro:
                rpm.delMacro(key)
def set_macro(self, macro, value):
    """Set, replace or remove a macro definition in the spec text and
    mirror the change in rpm's global macro table.

    A truthy *value* replaces an existing definition line (or prepends
    a new %global line); a falsy *value* removes the definition.
    """
    if not RPM_AVAILABLE:
        raise exception.RpmModuleNotAvailable()
    pattern = self.RE_MACRO_BASE.format(re.escape(macro))
    rpm.delMacro(macro)
    if not value:
        # remove the definition line entirely
        self._txt = re.sub(r'(^|\n)%s[^\n]+\n?' % pattern,
                           r'\g<1>', self.txt)
        return
    # replace an existing definition in place
    self._txt, count = re.subn(r'^(%s).*$' % pattern,
                               r'\g<1>%s' % value,
                               self.txt, flags=re.M)
    if count < 1:
        # no existing definition: prepend a new %global line
        self._txt = u'%global {0} {1}\n{2}'.format(macro, value, self.txt)
    rpm.addMacro(macro, value)
def read_from_db(package, rpmdb_path=None, glob=False):
    """Look up *package* in the rpm database and return a match iterator.

    rpmdb_path, if given, temporarily overrides the '_dbpath' macro;
    the override is removed in a ``finally`` clause (previously it
    leaked if opening the database raised).  With glob=True, *package*
    is treated as a glob pattern and '*' matches everything.

    Raises Error on any rpm-level failure.
    """
    if rpmdb_path is not None:
        rpm.addMacro('_dbpath', rpmdb_path)
    try:
        ts = rpm.TransactionSet()
        ts.openDB()
    finally:
        if rpmdb_path is not None:
            rpm.delMacro('_dbpath')
    try:
        if glob:
            matches = ts.dbMatch(rpm.RPMDBI_LABEL)
            if package != '*':
                matches.pattern('name', rpm.RPMMIRE_GLOB, package)
        else:
            matches = ts.dbMatch(rpm.RPMDBI_LABEL, package)
        return matches
    except rpm.error as e:
        raise Error(str(e)) from e
def set_macro(self, macro, value):
    """Set, replace or remove a macro definition in the spec text and
    keep rpm's global macro table in sync.

    Fix: the regex replacement templates are now raw strings --
    ``'\\g<1>'`` in a normal string literal is an invalid escape
    sequence (DeprecationWarning since Python 3.6, an error in future
    versions); the sibling implementation already uses raw strings.
    """
    rex = self.RE_MACRO_BASE.format(re.escape(macro))
    rpm.delMacro(macro)
    if value:
        # replace
        self._txt, n = re.subn(r'^(%s).*$' % rex, r'\g<1>%s' % value,
                               self.txt, flags=re.M)
        if n < 1:
            # create new
            self._txt = '%global {0} {1}\n{2}'.format(
                macro, value, self.txt)
        rpm.addMacro(macro, value)
    else:
        # remove
        self._txt = re.sub(r'(^|\n)%s[^\n]+\n?' % rex, r'\g<1>', self.txt)
def rpmNameFromHeader( h ):
    """Return the file name of the binary rpm built from header *h*.

    The NAME/VERSION/RELEASE/ARCH macros are defined only for the
    expansion and are now removed in a ``finally`` clause, so a failure
    while expanding the file-name pattern can no longer leak them.
    """
    rpm.addMacro( 'NAME', h['name'] )
    rpm.addMacro( 'VERSION', h['version'] )
    rpm.addMacro( 'RELEASE', h['release'] )
    rpm.addMacro( 'ARCH', h['arch'] )
    try:
        rpmname = rpm.expandMacro( rpmfilenamepat )
    finally:
        rpm.delMacro( 'NAME' )
        rpm.delMacro( 'VERSION' )
        rpm.delMacro( 'RELEASE' )
        rpm.delMacro( 'ARCH' )
    return rpmname
def run(self, args):
    """Resolve dependencies for each requested package spec.

    User-supplied macro definitions (``self.opts.define``) are pushed
    before parsing and always popped again in a ``finally`` clause, so
    an exception during resolution can no longer leave them defined
    for future rpm calls.
    """
    if self.opts.help_cmd:
        return

    # Push user-supplied macro definitions for spec parsing
    for macro in self.opts.define:
        rpm.addMacro(macro[0], macro[1])
    try:
        for pkgspec in self.opts.packages:
            if pkgspec.endswith('.src.rpm') or pkgspec.endswith('nosrc.rpm'):
                self._src_deps(pkgspec)
            elif pkgspec.endswith('.spec'):
                self._spec_deps(pkgspec)
            else:
                self._remote_deps(pkgspec)
    finally:
        # Pop user macros so they don't affect future rpm calls
        for macro in self.opts.define:
            rpm.delMacro(macro[0])
def __init__(self, confPath, readHeaders = True, RPMDbPath = DEFAULT_RPM_DB_PATH):
    """
    DESC : Constructor for Configuration class.
    PARAMS :
        - confPath : configuration path
        - readHeaders : if True, parse all package headers immediately
        - RPMDbPath : -TESTING ONLY- *relative* RPM database path in the
          configuration
    """
    self._confPath = confPath
    self._RPMDbPath = RPMDbPath

    #rpm.setVerbosity(7)
    rpm.addMacro("_dbpath", self._confPath + self._RPMDbPath)
    try:
        self._ts = rpm.TransactionSet()
        #self._ts.Debug(1)
        self._ts.openDB()
    finally:
        # Remove the override even if opening the database fails, so
        # the global '_dbpath' macro cannot leak out of the constructor.
        rpm.delMacro("_dbpath")

    self._hdrs = []
    if readHeaders:
        self._parseHdrs()
def set_macro(self, macro, value):
    """Set, replace or remove a macro definition in the spec text and
    keep rpm's global macro table in sync.

    Fix: the regex replacement templates are now raw strings --
    ``'\\g<1>'`` in a normal string literal is an invalid escape
    sequence (DeprecationWarning since Python 3.6, an error in future
    versions); the sibling implementation already uses raw strings.
    """
    if not RPM_AVAILABLE:
        raise exception.RpmModuleNotAvailable()
    rex = self.RE_MACRO_BASE.format(re.escape(macro))
    rpm.delMacro(macro)
    if value:
        # replace
        self._txt, n = re.subn(r'^(%s).*$' % rex, r'\g<1>%s' % value,
                               self.txt, flags=re.M)
        if n < 1:
            # create new
            self._txt = u'%global {0} {1}\n{2}'.format(
                macro, value, self.txt)
        rpm.addMacro(macro, value)
    else:
        # remove
        self._txt = re.sub(r'(^|\n)%s[^\n]+\n?' % rex, r'\g<1>', self.txt)
def get_package_header(filename=None, file_obj=None, fd=None):
    """
    Loads the package header from a file / stream / file descriptor
    Raises rpm.error if an error is found, or InvalidPacageError if package is
    busted

    Fixes: the file is opened in binary mode ('rb', matching the
    sibling implementation) -- rpm payloads are binary and text mode
    breaks on Python 3; the duplicated delMacro in the old
    try/except-reraise block is replaced with a single ``finally``.
    """
    global SHARED_TS
    # XXX Deal with exceptions better
    if (filename is None and file_obj is None and fd is None):
        raise ValueError("No parameters passed")

    if filename is not None:
        f = open(filename, 'rb')
    elif file_obj is not None:
        f = file_obj
        f.seek(0, 0)
    else:  # fd is not None
        f = None

    if f is None:
        os.lseek(fd, 0, 0)
        file_desc = fd
    else:
        file_desc = f.fileno()

    # don't try to use rpm.readHeaderFromFD() here, it brokes signatures
    # see commit message
    if not SHARED_TS:
        SHARED_TS = rpm.ts()
        SHARED_TS.setVSFlags(-1)

    rpm.addMacro('_dbpath', '/var/cache/rhn/rhnpush-rpmdb')
    try:
        hdr = SHARED_TS.hdrFromFdno(file_desc)
    finally:
        rpm.delMacro('_dbpath')

    if hdr is None:
        raise InvalidPackageError
    is_source = hdr[rpm.RPMTAG_SOURCEPACKAGE]
    return RPM_Header(hdr, is_source)
def get_package_header(filename=None, file_obj=None, fd=None):
    """
    Loads the package header from a file / stream / file descriptor
    Raises rpm.error if an error is found, or InvalidPacageError if package is
    busted

    Fix: the duplicated delMacro in the old try/except-reraise block is
    replaced with a single ``finally`` clause.
    """
    global SHARED_TS
    # XXX Deal with exceptions better
    if (filename is None and file_obj is None and fd is None):
        raise ValueError("No parameters passed")

    if filename is not None:
        f = open(filename, 'rb')
    elif file_obj is not None:
        f = file_obj
        f.seek(0, 0)
    else:  # fd is not None
        f = None

    if f is None:
        os.lseek(fd, 0, 0)
        file_desc = fd
    else:
        file_desc = f.fileno()

    # don't try to use rpm.readHeaderFromFD() here, it brokes signatures
    # see commit message
    if not SHARED_TS:
        SHARED_TS = rpm.ts()
        SHARED_TS.setVSFlags(-1)

    rpm.addMacro('_dbpath', '/var/cache/rhn/rhnpush-rpmdb')
    try:
        hdr = SHARED_TS.hdrFromFdno(file_desc)
    finally:
        rpm.delMacro('_dbpath')

    if hdr is None:
        raise InvalidPackageError
    is_source = hdr[rpm.RPMTAG_SOURCEPACKAGE]
    return RPM_Header(hdr, is_source)
def rpm_name_from_header(hdr):
    """Return the name of the binary package
    file which will be built from hdr"""
    # NOTE(review): declared as a free function but references
    # self.map_package_name / self.map_arch / self.rpmfilenamepat --
    # this raises NameError unless it is nested where 'self' is in
    # scope.  Confirm against the enclosing context.
    rpm.addMacro('NAME', self.map_package_name(hdr['name'])[0])
    rpm.addMacro('VERSION', hdr['version'])
    rpm.addMacro('RELEASE', hdr['release'])
    rpm.addMacro('ARCH', self.map_arch(hdr['arch']))
    rpmname = rpm.expandMacro(self.rpmfilenamepat)
    # NOTE(review): if expandMacro raises, these delMacro calls are
    # skipped and the macro definitions leak.
    rpm.delMacro('NAME')
    rpm.delMacro('VERSION')
    rpm.delMacro('RELEASE')
    rpm.delMacro('ARCH')
    return os.path.join(RPMDIR, rpmname)
def rpm_name_from_header(hdr):
    """Return the name of the binary package
    file which will be built from hdr"""
    # NOTE(review): declared as a free function but references
    # self.map_package_name / self.map_arch / self.rpmfilenamepat --
    # this raises NameError unless it is nested where 'self' is in
    # scope.  Confirm against the enclosing context.
    rpm.addMacro("NAME", self.map_package_name(hdr["name"])[0])
    rpm.addMacro("VERSION", hdr["version"])
    rpm.addMacro("RELEASE", hdr["release"])
    rpm.addMacro("ARCH", self.map_arch(hdr["arch"]))
    rpmname = rpm.expandMacro(self.rpmfilenamepat)
    # NOTE(review): if expandMacro raises, these delMacro calls are
    # skipped and the macro definitions leak.
    rpm.delMacro("NAME")
    rpm.delMacro("VERSION")
    rpm.delMacro("RELEASE")
    rpm.delMacro("ARCH")
    return os.path.join(RPMDIR, rpmname)
def builddep(altdbpath):
    """Scan the rpm database (optionally at *altdbpath*) and fill the
    global Reqs/Feat/File tables, then translate each package's raw
    requirements (names, features, file paths) into package names.

    Returns the Reqs mapping: package name -> list of required
    package names.  (Python 2 code: print statements, keys().sort().)
    """
    if altdbpath:
        rpm.addMacro('_dbpath', altdbpath)
    ts = rpm.TransactionSet()
    mi = ts.dbMatch()
    # The macro can be dropped as soon as the match iterator exists.
    if altdbpath:
        rpm.delMacro('_dbpath')
    for h in mi:
        # Packages without an arch (e.g. gpg-pubkey) get '(none)'.
        arch = h['arch'] if h['arch'] else '(none)'
        name = h['name'] + '.' + arch
        Reqs[name] = h['requires']
        # Index which packages provide each feature...
        for f in h['provides']:
            if f not in Feat:
                Feat[f] = []
            if name not in Feat[f]:
                Feat[f].append(name)
        # ...and which packages own each file path.
        for f in h['filenames']:
            if f not in File:
                File[f] = []
            if name not in File[f]:
                File[f].append(name)
    if Options.verbose:
        print Feat
        print File
    names = Reqs.keys()
    names.sort()
    for name in names:
        # translate requirements (rpm names/features/filenames) to rpm names
        f = flatten([lookup(y) for y in Reqs[name]])
        # drop self-requirements and duplicates
        Reqs[name] = uniq([x for x in f if x != name])
        if Options.verbose and (name == 'gpg-pubkey.(none)' or
                                name == 'kernel.x86_64' or
                                name == 'kernel-devel.x86_64'):
            print Reqs[name]
    return Reqs
def configure_versionlock():
    """Write a versionlock list covering every package installed in the
    new layer and make the yum versionlock plugin follow obsoletes.

    NOTE(review): relies on enclosing-scope names (new_fs, log, File);
    confirm this is a nested function or that those are module globals.
    """
    log.info("Configuring versionlock for %s" % new_fs.source)
    fmt = "{0.name}-{0.version}-{0.release}.{0.arch}\n"
    data = "# imgbased: versionlock begin for layer %s\n" % new_fs.source
    # Point rpm at the layer's own database rather than the host's.
    rpm.addMacro("_dbpath", new_fs.path("/usr/share/rpm"))
    for hdr in rpm.TransactionSet().dbMatch():
        # image-update packages are deliberately left out of the lock.
        if "image-update" in hdr.name.decode("utf-8"):
            continue
        data += fmt.format(hdr)
    rpm.delMacro("_dbpath")
    data += "# imgbased: versionlock end\n"

    # versionlock.list must exist, so find which one should we use
    for d in ("/etc/yum/pluginconf.d", "/etc/dnf/plugins/"):
        f = File(os.path.join(new_fs.path(d), "versionlock.list"))
        if f.exists():
            f.write(data)

    # Make sure we follow obsoletes for `yum versionlock`
    conf = File(new_fs.path("/etc/yum/pluginconf.d/versionlock.conf"))
    if conf.exists():
        data = conf.contents.splitlines()
        pattern = re.compile("^follow_obsoletes\\s*=\\s*1")
        if not [x for x in data if pattern.search(x)]:
            # NOTE(review): 'writen' looks like a typo for 'write' --
            # confirm against the File helper's API before changing.
            conf.writen("follow_obsoletes = 1", mode="a")
def _load_installed_packages(self):
    """Visit the rpmdb and feed data for each installed package to
    self._on_package_data().

    If self._rpm_dbpath is set, the rpm '_dbpath' macro is overridden
    for the duration of the scan.  The override is removed in a
    ``finally`` clause (previously delMacro ran unconditionally -- even
    when addMacro had not been called -- and was skipped entirely if
    iteration raised).
    """
    log.info('reading installed packages.')
    override_dbpath = self._rpm_dbpath is not None
    if override_dbpath:
        rpm.addMacro('_dbpath', self._rpm_dbpath)
    try:
        for pkg in rpm.ts().dbMatch():
            # TODO Load capabilities information in the same manner
            # Media.list_medias() will return.
            pkgdict = {}
            for attr in ('name', 'version', 'release', 'arch', 'epoch',
                         'size', 'group', 'summary', 'installtime',
                         'disttag', 'distepoch'):
                value = pkg[attr]
                # Some header tags come back as empty lists; normalise
                # those to an empty string.
                if isinstance(value, list) and not value:
                    value = ''
                pkgdict[attr] = value
            # installtime may be a list of timestamps; keep the first.
            if isinstance(pkg['installtime'], list):
                pkgdict['installtime'] = pkg['installtime'][0]
            # A missing epoch is treated as epoch 0.
            if pkgdict['epoch'] is None:
                pkgdict['epoch'] = 0
            self._on_package_data(pkgdict)
    finally:
        # Only delete the macro if we actually defined it above.
        if override_dbpath:
            rpm.delMacro('_dbpath')
def rpmNameFromHeader( h ):
    """Return the package file name built from header *h*, mapping the
    name and arch to Debian conventions when not building rpms.

    The NAME/VERSION/RELEASE/ARCH macros are defined only for the
    expansion and are now removed in a ``finally`` clause, so a failure
    while expanding the file-name pattern can no longer leak them.
    """
    if buildType() == "rpm":
        rpm.addMacro( 'NAME', h['name'] )
    else:
        rpm.addMacro( 'NAME', mappkgname.map_package_name(h) )
    rpm.addMacro( 'VERSION', h['version'] )
    rpm.addMacro( 'RELEASE', h['release'] )
    if buildType() == "rpm":
        rpm.addMacro( 'ARCH', h['arch'] )
    else:
        # deb arch names: x86_64 -> amd64, noarch -> all
        rpm.addMacro( 'ARCH',
                      "amd64" if h['arch'] == "x86_64"
                      else "all" if h['arch'] == "noarch"
                      else h['arch'] )
    try:
        rpmname = rpm.expandMacro( rpmfilenamepat )
    finally:
        rpm.delMacro( 'NAME' )
        rpm.delMacro( 'VERSION' )
        rpm.delMacro( 'RELEASE' )
        rpm.delMacro( 'ARCH' )
    return rpmname
def rpm_name_from_header(hdr):
    """Return the package file name built from header *hdr*, mapping
    the arch to Debian conventions when not building rpms.

    The NAME/VERSION/RELEASE/ARCH macros are defined only for the
    expansion and are now removed in a ``finally`` clause, so a failure
    while expanding the file-name pattern can no longer leak them.
    """
    rpm.addMacro('NAME', map_package_name(hdr['name'])[0])
    rpm.addMacro('VERSION', hdr['version'])
    rpm.addMacro('RELEASE', hdr['release'])
    if build_type() == "rpm":
        rpm.addMacro('ARCH', hdr['arch'])
    else:
        # deb arch names: x86_64 -> amd64, noarch -> all
        rpm.addMacro(
            'ARCH',
            "amd64" if hdr['arch'] == "x86_64"
            else "all" if hdr['arch'] == "noarch"
            else hdr['arch'])
    try:
        rpmname = rpm.expandMacro(RPMFILENAMEPAT)
    finally:
        rpm.delMacro('NAME')
        rpm.delMacro('VERSION')
        rpm.delMacro('RELEASE')
        rpm.delMacro('ARCH')
    return rpmname
def source_package_path(self):
    """Return the path of the source package which building this
    spec will produce.

    The NAME/VERSION/RELEASE/ARCH macros are defined only for the
    expansion and are now removed in a ``finally`` clause, so a failure
    while expanding the file-name pattern can no longer leak them.
    """
    hdr = self.spec.sourceHeader
    rpm.addMacro('NAME', self.map_package_name(hdr['name'])[0])
    rpm.addMacro('VERSION', hdr['version'])
    rpm.addMacro('RELEASE', hdr['release'])
    rpm.addMacro('ARCH', 'src')

    # There doesn't seem to be a macro for the name of the source
    # rpm, but the name appears to be the same as the rpm name format.
    # Unfortunately expanding that macro gives us a leading 'src' that we
    # don't want, so we strip that off
    try:
        srpmname = os.path.basename(rpm.expandMacro(self.srpmfilenamepat))
    finally:
        rpm.delMacro('NAME')
        rpm.delMacro('VERSION')
        rpm.delMacro('RELEASE')
        rpm.delMacro('ARCH')

    return os.path.join(SRPMDIR, srpmname)
class Checker:
    """Maintains a throwaway rpm database (in a mkdtemp directory) into
    which public keys can be imported.  (Python 2 code.)"""

    def __init__(self):
        # NOTE(review): the temp dir is never removed here and the
        # '_dbpath' macro stays defined after __init__ returns --
        # confirm callers depend on that.
        self.dbdir = mkdtemp(prefix='oscrpmdb')
        # key id -> imported key data (filled by readkey)
        self.imported = {}
        rpm.addMacro('_dbpath', self.dbdir)
        self.ts = rpm.TransactionSet()
        self.ts.initDB()
        self.ts.openDB()
        # disable signature/digest verification flags
        self.ts.setVSFlags(0)
        #self.ts.Debug(1)

    def readkeys(self, keys=[]):
        # NOTE(review): mutable default argument -- safe only as long
        # as no caller mutates the list.
        rpm.addMacro('_dbpath', self.dbdir)
        for key in keys:
            try:
                self.readkey(key)
            except KeyError, e:
                print e
        # NOTE(review): this raise skips the delMacro below, leaking
        # the '_dbpath' override.
        if not len(self.imported):
            raise KeyError('', "no key imported")
        rpm.delMacro("_dbpath")
def check_spec(self, pkg, spec_file, spec_lines=None): self._spec_file = spec_file spec_only = isinstance(pkg, Pkg.FakePkg) if not spec_lines: spec_lines = Pkg.readlines(spec_file) patches = {} applied_patches = [] applied_patches_ifarch = [] patches_auto_applied = False source_dir = False buildroot = False configure_linenum = None configure_cmdline = "" mklibname = False is_lib_pkg = False if_depth = 0 ifarch_depth = -1 current_section = 'package' buildroot_clean = {'clean': False, 'install': False} depscript_override = False depgen_disabled = False patch_fuzz_override = False indent_spaces = 0 indent_tabs = 0 section = {} # None == main package current_package = None package_noarch = {} is_utf8 = False if self._spec_file and use_utf8: if Pkg.is_utf8(self._spec_file): is_utf8 = True else: printError(pkg, "non-utf8-spec-file", self._spec_name or self._spec_file) # gather info from spec lines pkg.current_linenum = 0 nbsp = UNICODE_NBSP if is_utf8 else chr(0xA0) do_unicode = is_utf8 and sys.version_info[0] <= 2 for line in spec_lines: pkg.current_linenum += 1 if do_unicode: line = unicode(line, "utf-8", "replace") # noqa false positive char = line.find(nbsp) if char != -1: printWarning(pkg, "non-break-space", "line %s, char %d" % (pkg.current_linenum, char)) section_marker = False for sec, regex in section_regexs.items(): res = regex.search(line) if res: current_section = sec section_marker = True section[sec] = section.get(sec, 0) + 1 if sec in ('package', 'files'): rest = filelist_regex.sub('', line[res.end() - 1:]) res = pkgname_regex.search(rest) if res: current_package = res.group(1) else: current_package = None break if section_marker: if not is_lib_pkg and lib_package_regex.search(line): is_lib_pkg = True continue if current_section in ('prep', 'build') and \ contains_buildroot(line): printWarning(pkg, 'rpm-buildroot-usage', '%' + current_section, line[:-1].strip()) if make_check_regex.search(line) and current_section not in \ ('check', 'changelog', 'package', 
'description'): printWarning(pkg, 'make-check-outside-check-section', line[:-1]) if current_section in buildroot_clean and \ not buildroot_clean[current_section] and \ contains_buildroot(line) and rm_regex.search(line): buildroot_clean[current_section] = True if ifarch_regex.search(line): if_depth = if_depth + 1 ifarch_depth = if_depth if if_regex.search(line): if_depth = if_depth + 1 if setup_regex.match(line): if not setup_q_regex.search(line): # Don't warn if there's a -T without -a or -b if setup_t_regex.search(line): if setup_ab_regex.search(line): printWarning(pkg, 'setup-not-quiet') else: printWarning(pkg, 'setup-not-quiet') if current_section != 'prep': printWarning(pkg, 'setup-not-in-prep') elif autopatch_regex.search(line): patches_auto_applied = True if current_section != 'prep': printWarning(pkg, '%autopatch-not-in-prep') else: res = autosetup_regex.search(line) if res: if not autosetup_n_regex.search(res.group(1)): patches_auto_applied = True if current_section != 'prep': printWarning(pkg, '%autosetup-not-in-prep') if endif_regex.search(line): if ifarch_depth == if_depth: ifarch_depth = -1 if_depth = if_depth - 1 res = applied_patch_regex.search(line) if res: pnum = res.group(1) or 0 for tmp in applied_patch_p_regex.findall(line) or [pnum]: pnum = int(tmp) applied_patches.append(pnum) if ifarch_depth > 0: applied_patches_ifarch.append(pnum) else: res = applied_patch_pipe_regex.search(line) if res: pnum = int(res.group(1)) applied_patches.append(pnum) if ifarch_depth > 0: applied_patches_ifarch.append(pnum) else: res = applied_patch_i_regex.search(line) if res: pnum = int(res.group(1)) applied_patches.append(pnum) if ifarch_depth > 0: applied_patches_ifarch.append(pnum) if not res and not source_dir: res = source_dir_regex.search(line) if res: source_dir = True printError(pkg, "use-of-RPM_SOURCE_DIR") if configure_linenum: if configure_cmdline[-1] == "\\": configure_cmdline = configure_cmdline[:-1] + line.strip() else: res = 
configure_libdir_spec_regex.search(configure_cmdline) if not res: # Hack to get the correct (start of ./configure) line # number displayed: real_linenum = pkg.current_linenum pkg.current_linenum = configure_linenum printWarning(pkg, "configure-without-libdir-spec") pkg.current_linenum = real_linenum elif res.group(1): res = re.match(hardcoded_library_paths, res.group(1)) if res: printError(pkg, "hardcoded-library-path", res.group(1), "in configure options") configure_linenum = None hashPos = line.find("#") if current_section != 'changelog': cfgPos = line.find('./configure') if cfgPos != -1 and (hashPos == -1 or hashPos > cfgPos): # store line where it started configure_linenum = pkg.current_linenum configure_cmdline = line.strip() res = hardcoded_library_path_regex.search(line) if current_section != 'changelog' and res and not \ (biarch_package_regex.match(pkg.name) or hardcoded_lib_path_exceptions_regex.search( res.group(1).lstrip())): printError(pkg, "hardcoded-library-path", "in", res.group(1).lstrip()) if '%mklibname' in line: mklibname = True if current_section == 'package': # Would be cleaner to get sources and patches from the # specfile parsed in Python (see below), but we want to # catch %ifarch'd etc ones as well, and also catch these when # the specfile is not parseable. 
res = patch_regex.search(line) if res: pnum = int(res.group(1) or 0) patches[pnum] = res.group(2) res = obsolete_tags_regex.search(line) if res: printWarning(pkg, "obsolete-tag", res.group(1)) res = buildroot_regex.search(line) if res: buildroot = True if res.group(1).startswith('/'): printWarning(pkg, 'hardcoded-path-in-buildroot-tag', res.group(1)) res = buildarch_regex.search(line) if res: if res.group(1) != "noarch": printError(pkg, 'buildarch-instead-of-exclusivearch-tag', res.group(1)) else: package_noarch[current_package] = True res = packager_regex.search(line) if res: printWarning(pkg, 'hardcoded-packager-tag', res.group(1)) res = prefix_regex.search(line) if res: if not res.group(1).startswith('%'): printWarning(pkg, 'hardcoded-prefix-tag', res.group(1)) res = prereq_regex.search(line) if res: printError(pkg, 'prereq-use', res.group(2)) res = buildprereq_regex.search(line) if res: printError(pkg, 'buildprereq-use', res.group(1)) if scriptlet_requires_regex.search(line): printError(pkg, 'broken-syntax-in-scriptlet-requires', line.strip()) res = requires_regex.search(line) if res: reqs = Pkg.parse_deps(res.group(1)) for req in unversioned(reqs): if compop_regex.search(req): printWarning(pkg, 'comparison-operator-in-deptoken', req) res = provides_regex.search(line) if res: provs = Pkg.parse_deps(res.group(1)) for prov in unversioned(provs): if not prov.startswith('/'): printWarning(pkg, 'unversioned-explicit-provides', prov) if compop_regex.search(prov): printWarning(pkg, 'comparison-operator-in-deptoken', prov) res = obsoletes_regex.search(line) if res: obses = Pkg.parse_deps(res.group(1)) for obs in unversioned(obses): if not obs.startswith('/'): printWarning(pkg, 'unversioned-explicit-obsoletes', obs) if compop_regex.search(obs): printWarning(pkg, 'comparison-operator-in-deptoken', obs) res = conflicts_regex.search(line) if res: confs = Pkg.parse_deps(res.group(1)) for conf in unversioned(confs): if compop_regex.search(conf): printWarning(pkg, 
'comparison-operator-in-deptoken', conf) if current_section == 'changelog': for match in AbstractCheck.macro_regex.findall(line): res = re.match('%+', match) if len(res.group(0)) % 2: printWarning(pkg, 'macro-in-%changelog', match) else: if not depscript_override: depscript_override = \ depscript_override_regex.search(line) is not None if not depgen_disabled: depgen_disabled = \ depgen_disable_regex.search(line) is not None if not patch_fuzz_override: patch_fuzz_override = \ patch_fuzz_override_regex.search(line) is not None if current_section == 'files': # TODO: check scriptlets for these too? if package_noarch.get(current_package) or \ (current_package not in package_noarch and package_noarch.get(None)): res = libdir_regex.search(line) if res: pkgname = current_package if pkgname is None: pkgname = '(main package)' printWarning(pkg, 'libdir-macro-in-noarch-package', pkgname, line.rstrip()) if not indent_tabs and '\t' in line: indent_tabs = pkg.current_linenum if not indent_spaces and indent_spaces_regex.search(line): indent_spaces = pkg.current_linenum # Check if egrep or fgrep is used if current_section not in \ ('package', 'changelog', 'description', 'files'): greps = deprecated_grep_regex.findall(line) if greps: printWarning(pkg, "deprecated-grep", greps) # If not checking spec file only, we're checking one inside a # SRPM -> skip this check to avoid duplicate warnings (#167) if spec_only and VALID_GROUPS and \ line.lower().startswith("group:"): group = line[6:].strip() if group not in VALID_GROUPS: printWarning(pkg, 'non-standard-group', group) # Test if there are macros in comments if hashPos != -1 and \ (hashPos == 0 or line[hashPos - 1] in (" ", "\t")): for match in AbstractCheck.macro_regex.findall( line[hashPos + 1:]): res = re.match('%+', match) if len(res.group(0)) % 2: printWarning(pkg, 'macro-in-comment', match) # Last line read is not useful after this point pkg.current_linenum = None for sect in (x for x in buildroot_clean if not 
buildroot_clean[x]): printWarning(pkg, 'no-cleaning-of-buildroot', '%' + sect) if not buildroot: printWarning(pkg, 'no-buildroot-tag') for sec in ('prep', 'build', 'install', 'clean'): if not section.get(sec): printWarning(pkg, 'no-%%%s-section' % sec) for sec in ('changelog',): # prep, build, install, clean, check prevented by rpmbuild 4.4 if section.get(sec, 0) > 1: printWarning(pkg, 'more-than-one-%%%s-section' % sec) if is_lib_pkg and not mklibname: printError(pkg, 'lib-package-without-%mklibname') if depscript_override and not depgen_disabled: printWarning(pkg, 'depscript-without-disabling-depgen') if patch_fuzz_override: printWarning(pkg, 'patch-fuzz-is-changed') if indent_spaces and indent_tabs: pkg.current_linenum = max(indent_spaces, indent_tabs) printWarning(pkg, 'mixed-use-of-spaces-and-tabs', '(spaces: line %d, tab: line %d)' % (indent_spaces, indent_tabs)) pkg.current_linenum = None # process gathered info if not patches_auto_applied: for pnum, pfile in patches.items(): if pnum in applied_patches_ifarch: printWarning(pkg, "%ifarch-applied-patch", "Patch%d:" % pnum, pfile) if pnum not in applied_patches: printWarning(pkg, "patch-not-applied", "Patch%d:" % pnum, pfile) # Rest of the checks require a real spec file if not self._spec_file: return # We'd like to parse the specfile only once using python bindings, # but it seems errors from rpmlib get logged to stderr and we can't # capture and print them nicely, so we do it once each way :P out = Pkg.getstatusoutput( ('rpm', '-q', '--qf=', '-D', '_sourcedir %s' % pkg.dirName(), '--specfile', self._spec_file)) parse_error = False for line in out[1].splitlines(): # No such file or dir hack: https://bugzilla.redhat.com/487855 if 'No such file or directory' not in line: parse_error = True printError(pkg, 'specfile-error', line) if not parse_error: # grab sources and patches from parsed spec object to get # them with macros expanded for URL checking spec_obj = None rpm.addMacro('_sourcedir', pkg.dirName()) try: 
ts = rpm.TransactionSet() spec_obj = ts.parseSpec(self._spec_file) except rpm.error: # errors logged above already pass rpm.delMacro('_sourcedir') if spec_obj: try: # rpm < 4.8.0 sources = spec_obj.sources() except TypeError: # rpm >= 4.8.0 sources = spec_obj.sources for src in sources: (url, num, flags) = src (scheme, netloc) = urlparse(url)[0:2] if flags & 1: # rpmspec.h, rpm.org ticket #123 srctype = "Source" else: srctype = "Patch" tag = '%s%s' % (srctype, num) if scheme and netloc: info = self.check_url(pkg, tag, url) if not info or not hasattr(pkg, 'files'): continue clen = info.get("Content-Length") if clen is not None: clen = int(clen) cmd5 = info.get("Content-MD5") if cmd5 is not None: cmd5 = cmd5.lower() if clen is not None or cmd5 is not None: # Not using path from urlparse results to match how # rpm itself parses the basename. pkgfile = pkg.files().get(url.split("/")[-1]) if pkgfile: if clen is not None and pkgfile.size != clen: printWarning(pkg, 'file-size-mismatch', '%s = %s, %s = %s' % (pkgfile.name, pkgfile.size, url, clen)) # pkgfile.md5 could be some other digest than # MD5, treat as MD5 only if it's 32 chars long if cmd5 and len(pkgfile.md5) == 32 \ and pkgfile.md5 != cmd5: printWarning(pkg, 'file-md5-mismatch', '%s = %s, %s = %s' % (pkgfile.name, pkgfile.md5, url, cmd5)) elif srctype == "Source" and tarball_regex.search(url): printWarning(pkg, 'invalid-url', '%s:' % tag, url)
def purge_macro(cls, macro: str) -> None:
    """Remove *macro* from rpm's macro table completely.

    A single delMacro may not be enough (rpm macro definitions can be
    pushed multiple times -- presumably the reason for the loop here),
    so keep deleting until expanding '%{macro}' returns the literal
    text unchanged, i.e. the macro is no longer defined.
    """
    m = '%{{{}}}'.format(macro)
    while cls.expand(m, m) != m:
        rpm.delMacro(macro)
def check_spec(self, pkg, spec_file, spec_lines=None):
    """Check a spec file for common packaging problems.

    Scans *spec_lines* line by line, tracking the current spec section and
    emitting warnings/errors through ``self.output.add_info`` (attributed to
    *pkg*).  Afterwards the spec is parsed twice more: once via the ``rpm``
    command line (to surface rpmlib parse errors) and once via the rpm
    Python bindings (to get sources/patches with macros expanded for URL
    checking).

    :param pkg: package object findings are reported against; its
        ``current_linenum`` attribute is updated while scanning
    :param spec_file: path to the spec file being checked
    :param spec_lines: optional pre-read list of spec lines; read from
        *spec_file* via ``Pkg.readlines`` when not given
    """
    self._spec_file = spec_file
    # FakePkg means we are checking a bare spec file, not one inside a SRPM
    spec_only = isinstance(pkg, Pkg.FakePkg)
    if not spec_lines:
        spec_lines = Pkg.readlines(spec_file)

    # --- per-spec scanning state -----------------------------------------
    patches = {}                    # patch number -> patch file name
    applied_patches = []            # patch numbers seen applied via %patch etc.
    applied_patches_ifarch = []     # subset applied inside an %ifarch block
    patches_auto_applied = False    # %autosetup / %autopatch seen
    source_dir = False
    buildroot = False
    configure_linenum = None        # line where a ./configure invocation began
    configure_cmdline = ''          # accumulated (continuation-joined) cmdline
    mklibname = False
    is_lib_pkg = False
    if_depth = 0                    # nesting depth of %if-style conditionals
    ifarch_depth = -1               # depth at which the innermost %ifarch opened
    current_section = 'package'
    buildroot_clean = {'clean': False, 'install': False}
    depscript_override = False
    depgen_disabled = False
    patch_fuzz_override = False
    indent_spaces = 0               # first line number indented with spaces
    indent_tabs = 0                 # first line number indented with tabs
    section = {}                    # section name -> occurrence count
    # None == main package
    current_package = None
    package_noarch = {}             # subpackage name -> BuildArch: noarch seen

    if self._spec_file:
        if not Pkg.is_utf8(self._spec_file):
            self.output.add_info('E', pkg, 'non-utf8-spec-file',
                                 self._spec_name or self._spec_file)

    # gather info from spec lines

    pkg.current_linenum = 0

    nbsp = UNICODE_NBSP
    for line in spec_lines:
        pkg.current_linenum += 1

        # non-breaking spaces are invisible and break rpm parsing
        char = line.find(nbsp)
        if char != -1:
            self.output.add_info(
                'W', pkg, 'non-break-space',
                'line %s, char %d' % (pkg.current_linenum, char))

        # detect a section header (%build, %files foo, ...) on this line
        section_marker = False
        for sec, regex in section_regexs.items():
            res = regex.search(line)
            if res:
                current_section = sec
                section_marker = True
                section[sec] = section.get(sec, 0) + 1
                if sec in ('package', 'files'):
                    # extract the (sub)package name following the marker
                    rest = filelist_regex.sub('', line[res.end() - 1:])
                    res = pkgname_regex.search(rest)
                    if res:
                        current_package = res.group(1)
                    else:
                        current_package = None
                break

        if section_marker:
            if not is_lib_pkg and lib_package_regex.search(line):
                is_lib_pkg = True
            # header lines carry no further content to check
            continue

        if (current_section in Pkg.RPM_SCRIPTLETS + ('prep', 'build') and
                contains_buildroot(line)):
            self.output.add_info('W', pkg, 'rpm-buildroot-usage',
                                 '%' + current_section, line[:-1].strip())

        if make_check_regex.search(line) and current_section not in \
                ('check', 'changelog', 'package', 'description'):
            self.output.add_info('W', pkg,
                                 'make-check-outside-check-section',
                                 line[:-1])

        # record that %install/%clean wipe the buildroot
        if current_section in buildroot_clean and \
                not buildroot_clean[current_section] and \
                contains_buildroot(line) and rm_regex.search(line):
            buildroot_clean[current_section] = True

        # conditional nesting bookkeeping; %ifarch remembered by depth so
        # patches applied inside it can be flagged later
        if ifarch_regex.search(line):
            if_depth = if_depth + 1
            ifarch_depth = if_depth
        if if_regex.search(line):
            if_depth = if_depth + 1

        if setup_regex.match(line):
            if not setup_q_regex.search(line):
                # Don't warn if there's a -T without -a or -b
                if setup_t_regex.search(line):
                    if setup_ab_regex.search(line):
                        self.output.add_info('W', pkg, 'setup-not-quiet')
                else:
                    self.output.add_info('W', pkg, 'setup-not-quiet')
            if current_section != 'prep':
                self.output.add_info('W', pkg, 'setup-not-in-prep')
        elif autopatch_regex.search(line):
            patches_auto_applied = True
            if current_section != 'prep':
                self.output.add_info('W', pkg, '%autopatch-not-in-prep')
        else:
            res = autosetup_regex.search(line)
            if res:
                # %autosetup without -N applies all patches automatically
                if not autosetup_n_regex.search(res.group(1)):
                    patches_auto_applied = True
                if current_section != 'prep':
                    self.output.add_info('W', pkg,
                                         '%autosetup-not-in-prep')

        if endif_regex.search(line):
            if ifarch_depth == if_depth:
                ifarch_depth = -1
            if_depth = if_depth - 1

        # three ways a patch can be applied: %patchN [-pX...], piped
        # through patch(1), or patch -i patchfile
        res = applied_patch_regex.search(line)
        if res:
            pnum = res.group(1) or 0
            for tmp in applied_patch_p_regex.findall(line) or [pnum]:
                pnum = int(tmp)
                applied_patches.append(pnum)
                if ifarch_depth > 0:
                    applied_patches_ifarch.append(pnum)
        else:
            res = applied_patch_pipe_regex.search(line)
            if res:
                pnum = int(res.group(1))
                applied_patches.append(pnum)
                if ifarch_depth > 0:
                    applied_patches_ifarch.append(pnum)
            else:
                res = applied_patch_i_regex.search(line)
                if res:
                    pnum = int(res.group(1))
                    applied_patches.append(pnum)
                    if ifarch_depth > 0:
                        applied_patches_ifarch.append(pnum)
        if not res and not source_dir:
            res = source_dir_regex.search(line)
            if res:
                source_dir = True
                self.output.add_info('E', pkg, 'use-of-RPM_SOURCE_DIR')

        # a ./configure started earlier: either keep joining backslash
        # continuations, or the command is complete -> check its options
        if configure_linenum:
            if configure_cmdline[-1] == '\\':
                configure_cmdline = configure_cmdline[:-1] + line.strip()
            else:
                res = configure_libdir_spec_regex.search(configure_cmdline)
                if not res:
                    # Hack to get the correct (start of ./configure) line
                    # number displayed:
                    real_linenum = pkg.current_linenum
                    pkg.current_linenum = configure_linenum
                    self.output.add_info('W', pkg,
                                         'configure-without-libdir-spec')
                    pkg.current_linenum = real_linenum
                elif res.group(1):
                    res = re.match(hardcoded_library_paths, res.group(1))
                    if res:
                        self.output.add_info('E', pkg,
                                             'hardcoded-library-path',
                                             res.group(1),
                                             'in configure options')
                configure_linenum = None

        hashPos = line.find('#')

        if current_section != 'changelog':
            cfgPos = line.find('./configure')
            # only a ./configure that is not commented out counts
            if cfgPos != -1 and (hashPos == -1 or hashPos > cfgPos):
                # store line where it started
                configure_linenum = pkg.current_linenum
                configure_cmdline = line.strip()

        res = hardcoded_library_path_regex.search(line)
        if current_section != 'changelog' and res and not \
                (biarch_package_regex.match(pkg.name) or
                 self.hardcoded_lib_path_exceptions_regex.search(
                     res.group(1).lstrip())):
            self.output.add_info('E', pkg, 'hardcoded-library-path', 'in',
                                 res.group(1).lstrip())

        if '%mklibname' in line:
            mklibname = True

        if current_section == 'package':
            # Would be cleaner to get sources and patches from the
            # specfile parsed in Python (see below), but we want to
            # catch %ifarch'd etc ones as well, and also catch these when
            # the specfile is not parseable.
            res = patch_regex.search(line)
            if res:
                pnum = int(res.group(1) or 0)
                patches[pnum] = res.group(2)

            res = obsolete_tags_regex.search(line)
            if res:
                self.output.add_info('W', pkg, 'obsolete-tag',
                                     res.group(1))

            res = buildroot_regex.search(line)
            if res:
                buildroot = True
                if res.group(1).startswith('/'):
                    self.output.add_info(
                        'W', pkg, 'hardcoded-path-in-buildroot-tag',
                        res.group(1))

            res = buildarch_regex.search(line)
            if res:
                if res.group(1) != 'noarch':
                    self.output.add_info(
                        'E', pkg,
                        'buildarch-instead-of-exclusivearch-tag',
                        res.group(1))
                else:
                    package_noarch[current_package] = True

            res = packager_regex.search(line)
            if res:
                self.output.add_info('W', pkg, 'hardcoded-packager-tag',
                                     res.group(1))

            res = prefix_regex.search(line)
            if res:
                if not res.group(1).startswith('%'):
                    self.output.add_info('W', pkg, 'hardcoded-prefix-tag',
                                         res.group(1))

            res = prereq_regex.search(line)
            if res:
                self.output.add_info('E', pkg, 'prereq-use', res.group(2))

            res = buildprereq_regex.search(line)
            if res:
                self.output.add_info('E', pkg, 'buildprereq-use',
                                     res.group(1))

            if scriptlet_requires_regex.search(line):
                self.output.add_info(
                    'E', pkg, 'broken-syntax-in-scriptlet-requires',
                    line.strip())

            # dependency tags: flag comparison operators inside a single
            # unversioned dep token, and unversioned Provides/Obsoletes
            res = requires_regex.search(line)
            if res:
                reqs = Pkg.parse_deps(res.group(1))
                for req in unversioned(reqs):
                    if compop_regex.search(req):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            req)

            res = provides_regex.search(line)
            if res:
                provs = Pkg.parse_deps(res.group(1))
                for prov in unversioned(provs):
                    if not prov.startswith('/'):
                        self.output.add_info(
                            'W', pkg, 'unversioned-explicit-provides',
                            prov)
                    if compop_regex.search(prov):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            prov)

            res = obsoletes_regex.search(line)
            if res:
                obses = Pkg.parse_deps(res.group(1))
                for obs in unversioned(obses):
                    if not obs.startswith('/'):
                        self.output.add_info(
                            'W', pkg, 'unversioned-explicit-obsoletes',
                            obs)
                    if compop_regex.search(obs):
                        self.output.add_info(
                            'W', pkg,
                            'comparison-operator-in-deptoken', obs)

            res = conflicts_regex.search(line)
            if res:
                confs = Pkg.parse_deps(res.group(1))
                for conf in unversioned(confs):
                    if compop_regex.search(conf):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            conf)

        if current_section == 'changelog':
            for match in macro_regex.findall(line):
                # an odd number of %'s means the macro is not escaped
                res = re.match('%+', match)
                if len(res.group(0)) % 2:
                    self.output.add_info('W', pkg, 'macro-in-%changelog',
                                         match)
        else:
            if not depscript_override:
                depscript_override = \
                    depscript_override_regex.search(line) is not None
            if not depgen_disabled:
                depgen_disabled = \
                    depgen_disable_regex.search(line) is not None
            if not patch_fuzz_override:
                patch_fuzz_override = \
                    patch_fuzz_override_regex.search(line) is not None

        if current_section == 'files':
            # TODO: check scriptlets for these too?
            # noarch status of a subpackage falls back to the main
            # package's when the subpackage has no BuildArch of its own
            if package_noarch.get(current_package) or \
                    (current_package not in package_noarch and
                     package_noarch.get(None)):
                res = libdir_regex.search(line)
                if res:
                    pkgname = current_package
                    if pkgname is None:
                        pkgname = '(main package)'
                    self.output.add_info('W', pkg,
                                         'libdir-macro-in-noarch-package',
                                         pkgname, line.rstrip())

        if not indent_tabs and '\t' in line:
            indent_tabs = pkg.current_linenum
        if not indent_spaces and indent_spaces_regex.search(line):
            indent_spaces = pkg.current_linenum

        # Check if egrep or fgrep is used
        if current_section not in \
                ('package', 'changelog', 'description', 'files'):
            greps = deprecated_grep_regex.findall(line)
            if greps:
                self.output.add_info('W', pkg, 'deprecated-grep', greps)

        # If not checking spec file only, we're checking one inside a
        # SRPM -> skip this check to avoid duplicate warnings (#167)
        if spec_only and self.valid_groups and \
                line.lower().startswith('group:'):
            group = line[6:].strip()
            if group not in self.valid_groups:
                self.output.add_info('W', pkg, 'non-standard-group',
                                     group)

        # Test if there are macros in comments
        if hashPos != -1 and \
                (hashPos == 0 or line[hashPos - 1] in (' ', '\t')):
            for match in macro_regex.findall(line[hashPos + 1:]):
                res = re.match('%+', match)
                if len(res.group(0)) % 2:
                    self.output.add_info('W', pkg, 'macro-in-comment',
                                         match)

    # Last line read is not useful after this point
    pkg.current_linenum = None

    for sect in (x for x in buildroot_clean if not buildroot_clean[x]):
        self.output.add_info('W', pkg, 'no-cleaning-of-buildroot',
                             '%' + sect)

    if not buildroot:
        self.output.add_info('W', pkg, 'no-buildroot-tag')

    for sec in ('prep', 'build', 'install', 'clean'):
        if not section.get(sec):
            self.output.add_info('W', pkg, 'no-%%%s-section' % sec)
    for sec in ('changelog', ):
        # prep, build, install, clean, check prevented by rpmbuild 4.4
        if section.get(sec, 0) > 1:
            self.output.add_info('W', pkg,
                                 'more-than-one-%%%s-section' % sec)

    if is_lib_pkg and not mklibname:
        self.output.add_info('E', pkg, 'lib-package-without-%mklibname')

    if depscript_override and not depgen_disabled:
        self.output.add_info('W', pkg,
                             'depscript-without-disabling-depgen')

    if patch_fuzz_override:
        self.output.add_info('W', pkg, 'patch-fuzz-is-changed')

    if indent_spaces and indent_tabs:
        pkg.current_linenum = max(indent_spaces, indent_tabs)
        self.output.add_info(
            'W', pkg, 'mixed-use-of-spaces-and-tabs',
            '(spaces: line %d, tab: line %d)' %
            (indent_spaces, indent_tabs))
        pkg.current_linenum = None

    # process gathered info
    if not patches_auto_applied:
        for pnum, pfile in patches.items():
            if pnum in applied_patches_ifarch:
                self.output.add_info('W', pkg, '%ifarch-applied-patch',
                                     'Patch%d:' % pnum, pfile)
            if pnum not in applied_patches:
                self.output.add_info('W', pkg, 'patch-not-applied',
                                     'Patch%d:' % pnum, pfile)

    # Rest of the checks require a real spec file
    if not self._spec_file:
        return

    # We'd like to parse the specfile only once using python bindings,
    # but it seems errors from rpmlib get logged to stderr and we can't
    # capture and print them nicely, so we do it once each way :P

    out = Pkg.getstatusoutput(
        ('rpm', '-q', '--qf=', '-D', '_sourcedir %s' % pkg.dirName(),
         '--specfile', self._spec_file))
    parse_error = False
    for line in out[1].splitlines():
        # No such file or dir hack: https://bugzilla.redhat.com/487855
        if 'No such file or directory' not in line:
            parse_error = True
            self.output.add_info('E', pkg, 'specfile-error', line)

    if not parse_error:
        # grab sources and patches from parsed spec object to get
        # them with macros expanded for URL checking
        spec_obj = None
        rpm.addMacro('_sourcedir', pkg.dirName())
        try:
            ts = rpm.TransactionSet()
            spec_obj = ts.parseSpec(self._spec_file)
        except (ValueError, rpm.error):
            # errors logged above already
            pass
        rpm.delMacro('_sourcedir')
        if spec_obj:
            try:
                # rpm < 4.8.0
                sources = spec_obj.sources()
            except TypeError:
                # rpm >= 4.8.0
                sources = spec_obj.sources
            for src in sources:
                (url, num, flags) = src
                (scheme, netloc) = urlparse(url)[0:2]
                if flags & 1:  # rpmspec.h, rpm.org ticket #123
                    srctype = 'Source'
                else:
                    srctype = 'Patch'
                tag = '%s%s' % (srctype, num)
                if scheme and netloc:
                    # remote source/patch: verify size/checksum against
                    # the file shipped in the package, if available
                    info = self.check_url(pkg, tag, url)
                    if not info or not hasattr(pkg, 'files'):
                        continue
                    clen = info.get('Content-Length')
                    if clen is not None:
                        clen = int(clen)
                    cmd5 = info.get('Content-MD5')
                    if cmd5 is not None:
                        cmd5 = cmd5.lower()
                    if clen is not None or cmd5 is not None:
                        # Not using path from urlparse results to match how
                        # rpm itself parses the basename.
                        pkgfile = pkg.files().get(url.split('/')[-1])
                        if pkgfile:
                            if clen is not None and pkgfile.size != clen:
                                self.output.add_info(
                                    'W', pkg, 'file-size-mismatch',
                                    '%s = %s, %s = %s' %
                                    (pkgfile.name, pkgfile.size, url,
                                     clen))
                            # pkgfile.md5 could be some other digest than
                            # MD5, treat as MD5 only if it's 32 chars long
                            if cmd5 and len(pkgfile.md5) == 32 \
                                    and pkgfile.md5 != cmd5:
                                self.output.add_info(
                                    'W', pkg, 'file-md5-mismatch',
                                    '%s = %s, %s = %s' %
                                    (pkgfile.name, pkgfile.md5, url,
                                     cmd5))
                elif srctype == 'Source' and tarball_regex.search(url):
                    self.output.add_info('W', pkg, 'invalid-url',
                                         '%s:' % tag, url)
def purge_macro(cls, macro: str) -> None:
    """Delete every stacked definition of *macro* from rpm's macro table.

    A macro may have been defined multiple times (rpm keeps a stack), so
    keep popping definitions until expanding the probe string yields the
    probe itself, i.e. the macro no longer resolves.
    """
    probe = '%{{{}}}'.format(macro)
    while cls.expand(probe, probe) != probe:
        # swallow any stderr noise rpm emits while dropping a definition
        with ConsoleHelper.Capturer(stderr=True):
            rpm.delMacro(macro)