def check_summary(self, pkg, lang, ignored_words):
    """Validate the Summary tag for one language.

    Checks encoding, unexpanded macros, spelling, formatting (single
    line, capitalization, no trailing dot, length, no leading spaces),
    forbidden words, and repetition of the package name.
    """
    summary = pkg.langtag(rpm.RPMTAG_SUMMARY, lang)
    if use_utf8:
        if not Pkg.is_utf8_bytestr(summary):
            printError(pkg, 'tag-not-utf8', 'Summary', lang)
        summary = Pkg.to_unicode(summary)
    else:
        summary = Pkg.b2s(summary)
    self._unexpanded_macros(pkg, 'Summary(%s)' % lang, summary)
    spell_check(pkg, summary, 'Summary(%s)', lang, ignored_words)
    if not summary:
        # Fix: an empty summary has nothing more to check; the indexing
        # below (summary[0], summary[-1]) would raise IndexError on ''.
        return
    if '\n' in summary:
        printError(pkg, 'summary-on-multiple-lines', lang)
    if (summary[0] != summary[0].upper() and
            summary.partition(' ')[0] not in CAPITALIZED_IGNORE_LIST):
        printWarning(pkg, 'summary-not-capitalized', lang, summary)
    if summary[-1] == '.':
        printWarning(pkg, 'summary-ended-with-dot', lang, summary)
    if len(summary) > max_line_len:
        printError(pkg, 'summary-too-long', lang, summary)
    if leading_space_regex.search(summary):
        printError(pkg, 'summary-has-leading-spaces', lang, summary)
    res = forbidden_words_regex.search(summary)
    if res and Config.getOption('ForbiddenWords'):
        printWarning(pkg, 'summary-use-invalid-word', lang, res.group(1))
    if pkg.name:
        sepchars = r'[\s%s]' % punct
        res = re.search(r'(?:^|\s)(%s)(?:%s|$)'
                        % (re.escape(pkg.name), sepchars),
                        summary, re.IGNORECASE | re.UNICODE)
        if res:
            printWarning(pkg, 'name-repeated-in-summary', lang,
                         res.group(1))
def main():
    """Parse the command line, diff the two packages and exit.

    Exit status is 1 when the packages differ, 0 when they do not.
    """
    ignored = []
    try:
        opts, args = getopt.getopt(
            sys.argv[1:], "hti:", ["help", "ignore-times", "ignore="])
    except getopt.GetoptError as e:
        Pkg.warn("Error: %s" % e)
        _usage()
    for opt, val in opts:
        if opt in ("-h", "--help"):
            _usage(0)
        if opt in ("-t", "--ignore-times"):
            # deprecated; --ignore=T should be used instead
            ignored.append("T")
        if opt in ("-i", "--ignore"):
            ignored.append(val)
    # exactly two package arguments are required
    if len(args) != 2:
        _usage()
    diff = Rpmdiff(args[0], args[1], ignore=ignored)
    report = diff.textdiff()
    if report:
        print(report)
    sys.exit(int(diff.differs()))
def runSpecChecks(pkg, fname, spec_lines=None):
    """Run every configured check's spec-file hook against pkg."""
    if verbose:
        printInfo(pkg, 'checking')
    for check_name in Config.allChecks():
        checker = AbstractCheck.known_checks.get(check_name)
        if not checker:
            Pkg.warn('(none): W: unknown check %s, skipping' % check_name)
            continue
        checker.verbose = verbose
        checker.check_spec(pkg, fname, spec_lines)
def __load_pkg(self, name, tmpdir=tempfile.gettempdir()):
    """Resolve *name* to a package: a file path first, then a uniquely
    installed package of that name.

    Raises KeyError when no installed package (or more than one)
    matches the name.
    """
    try:
        if os.path.isfile(name):
            return Pkg.Pkg(name, tmpdir)
    except TypeError:
        # name was not usable as a path; fall through to the
        # installed-package lookup below
        pass
    matches = Pkg.getInstalledPkgs(name)
    count = len(matches)
    if count == 0:
        raise KeyError("No installed packages by name %s" % name)
    if count > 1:
        raise KeyError("More than one installed packages by name %s" % name)
    (only,) = matches
    return only
def check(self, pkg):
    """Check the package signature.

    Reports 'unknown-key' when verification fails because of an
    unrecognized key, 'no-signature' when the package carries no PGP
    signature, and warns on any other verification error.
    """
    res = pkg.checkSignature()
    if not res or res[0] != 0:
        if res and res[1]:
            kres = SignatureCheck.unknown_key_regex.search(res[1])
        else:
            kres = None
        if kres:
            printError(pkg, "unknown-key", kres.group(1))
        else:
            # Fix: res may be None/empty here (the `not res` arm of the
            # condition above); the original read res[1] unconditionally
            # and raised TypeError in that case.
            output = res[1] if res else res
            Pkg.warn("Error checking signature of %s: %s" %
                     (pkg.filename, output))
    else:
        if not SignatureCheck.pgp_regex.search(res[1]):
            printError(pkg, "no-signature")
def test_parse_deps():
    """Pkg.parse_deps must split a dependency string into
    (name, flags, (epoch, version, release)) tuples."""
    cases = [
        ('a, b < 1.0 c = 5:2.0-3 d',
         [('a', 0, (None, None, None)),
          ('b', rpm.RPMSENSE_LESS, (None, '1.0', None)),
          ('c', rpm.RPMSENSE_EQUAL, ('5', '2.0', '3')),
          ('d', 0, (None, None, None))]),
    ]
    for text, expected in cases:
        assert Pkg.parse_deps(text) == expected
def check_description(self, pkg, lang, ignored_words):
    """Validate the %description text for a single language:
    encoding, unexpanded macros, spelling, line length, forbidden
    words and stray tags."""
    description = pkg.langtag(rpm.RPMTAG_DESCRIPTION, lang)
    if not Pkg.is_utf8_bytestr(description):
        self.output.add_info('E', pkg, 'tag-not-utf8', '%description', lang)
    description = byte_to_string(description)
    self._unexpanded_macros(pkg, '%%description -l %s' % lang, description)

    if self.spellcheck:
        pkgname = byte_to_string(pkg.header[rpm.RPMTAG_NAME])
        typos = self.spellchecker.spell_check(
            description, '%description -l {}', lang, pkgname, ignored_words)
        for typo in typos.items():
            self.output.add_info('E', pkg, 'spelling-error', typo)

    for line in description.splitlines():
        if len(line) > self.max_line_len:
            self.output.add_info('E', pkg, 'description-line-too-long',
                                 lang, line)
        match = self.forbidden_words_regex.search(line)
        if match and self.config.configuration['ForbiddenWords']:
            self.output.add_info('W', pkg, 'description-use-invalid-word',
                                 lang, match.group(1))
        match = tag_regex.search(line)
        if match:
            self.output.add_info('W', pkg, 'tag-in-description',
                                 lang, match.group(1))
def test_parse_deps():
    """Pkg.parse_deps must split a dependency string into
    (name, flags, EVR-tuple) entries."""
    dep_string = "a, b < 1.0 c = 5:2.0-3 d"
    expected = [
        ("a", 0, (None, None, None)),
        ("b", rpm.RPMSENSE_LESS, (None, "1.0", None)),
        ("c", rpm.RPMSENSE_EQUAL, ("5", "2.0", "3")),
        ("d", 0, (None, None, None)),
    ]
    assert Pkg.parse_deps(dep_string) == expected
def __comparePRCOs(self, old, new, name):
    """Diff one dependency category (Provides/Requires/...) between the
    old and new package headers and record added/removed entries.

    name is the plural tag name (e.g. 'PROVIDES'); the corresponding
    FLAGS and VERSION tags are derived by stripping the trailing 'S'.
    """
    try:
        oldflags = old[name[:-1] + 'FLAGS']
    except ValueError:
        # assume tag not supported, e.g. Recommends with older rpm
        return
    newflags = new[name[:-1] + 'FLAGS']
    # fix buggy rpm binding not returning list for single entries
    if not isinstance(oldflags, list):
        oldflags = [oldflags]
    if not isinstance(newflags, list):
        newflags = [newflags]
    # each entry becomes a (name, flags, version) triple
    o = zip(old[name], oldflags, old[name[:-1] + 'VERSION'])
    if not isinstance(o, list):
        o = list(o)
    n = zip(new[name], newflags, new[name[:-1] + 'VERSION'])
    if not isinstance(n, list):
        n = list(n)
    # filter self provides, TODO: self %name(%_isa) as well
    if name == 'PROVIDES':
        # build the "name = [epoch:]version-release" self-provide triple
        # for each side and drop it from the comparison
        oldE = old['epoch'] is not None and str(old['epoch']) + ":" or ""
        oldV = "%s%s" % (oldE, old.format("%{VERSION}-%{RELEASE}"))
        oldNV = (old['name'], rpm.RPMSENSE_EQUAL, oldV.encode())
        newE = new['epoch'] is not None and str(new['epoch']) + ":" or ""
        newV = "%s%s" % (newE, new.format("%{VERSION}-%{RELEASE}"))
        newNV = (new['name'], rpm.RPMSENSE_EQUAL, newV.encode())
        o = [entry for entry in o if entry != oldNV]
        n = [entry for entry in n if entry != newNV]
    # entries only in the old package are reported as removed
    for oldentry in o:
        if oldentry not in n:
            namestr = name
            if namestr == 'REQUIRES':
                # distinguish weak/strong requires from the flag bits
                namestr = self.req2str(oldentry[1])
            self.__add(self.DEPFORMAT,
                       (self.REMOVED, namestr, Pkg.b2s(oldentry[0]),
                        self.sense2str(oldentry[1]), Pkg.b2s(oldentry[2])))
    # entries only in the new package are reported as added
    for newentry in n:
        if newentry not in o:
            namestr = name
            if namestr == 'REQUIRES':
                namestr = self.req2str(newentry[1])
            self.__add(self.DEPFORMAT,
                       (self.ADDED, namestr, Pkg.b2s(newentry[0]),
                        self.sense2str(newentry[1]), Pkg.b2s(newentry[2])))
def check_spec(self, pkg, spec_file, spec_lines=None):
    """SCL spec file checks: dispatch on what the spec declares."""
    content = '\n'.join(Pkg.readlines(spec_file))
    if global_scl_definition.search(content):
        # spec defines %scl itself -> treat as SCL metapackage
        self.check_metapackage(pkg, content)
    elif scl_package_definition.search(content):
        self.check_scl_spec(pkg, content)
    elif scl_use.search(content):
        printError(pkg, 'undeclared-scl')
def check_spec(self, pkg, spec_file, spec_lines=None):
    """SCL spec file checks: run the check matching the spec's SCL usage."""
    text = '\n'.join(Pkg.readlines(spec_file))
    if global_scl_definition.search(text):
        # defines %scl -> SCL metapackage rules apply
        self.check_metapackage(pkg, text)
    elif scl_package_definition.search(text):
        self.check_scl_spec(pkg, text)
    elif scl_use.search(text):
        # uses SCL macros without declaring the collection
        self.output.add_info('E', pkg, 'undeclared-scl')
def check_description(self, pkg, lang, ignored_words):
    """Validate the %description text for a single language:
    encoding, unexpanded macros, spelling, line length, forbidden
    words and stray tags."""
    description = pkg.langtag(rpm.RPMTAG_DESCRIPTION, lang)
    if not use_utf8:
        description = Pkg.b2s(description)
    else:
        if not Pkg.is_utf8_bytestr(description):
            printError(pkg, 'tag-not-utf8', '%description', lang)
        description = Pkg.to_unicode(description)
    self._unexpanded_macros(pkg, '%%description -l %s' % lang, description)
    spell_check(pkg, description, '%%description -l %s', lang, ignored_words)
    for line in description.splitlines():
        if len(line) > max_line_len:
            printError(pkg, 'description-line-too-long', lang, line)
        match = forbidden_words_regex.search(line)
        if match and Config.getOption('ForbiddenWords'):
            printWarning(pkg, 'description-use-invalid-word', lang,
                         match.group(1))
        match = tag_regex.search(line)
        if match:
            printWarning(pkg, 'tag-in-description', lang, match.group(1))
def check_syntax_script(prog, commandline, script):
    """Write *script* to a temp file and run `prog commandline <file>`
    on it; return the command's exit status (falsy means success).
    Returns False immediately for an empty script."""
    if not script:
        return False
    # TODO: test that 'prog' is available/executable
    fd, scriptfile = tempfile.mkstemp(prefix='rpmlint.')
    try:
        with os.fdopen(fd, 'wb') as fobj:
            fobj.write(script)
        ret = Pkg.getstatusoutput((prog, commandline, scriptfile))
    finally:
        # always clean up the temp file, even if the checker failed
        os.remove(scriptfile)
    return ret[0]
def check(self, pkg):
    """Check zip archives (and jars) shipped in the package.

    Verifies CRCs, warns on fully uncompressed archives, and for jars
    additionally checks for Class-Path manifest entries and jar
    indexing (per the want_indexed_jars setting).
    """
    for fname, pkgfile in pkg.files().items():
        path = pkgfile.path
        # only look at real, existing files whose name matches the zip
        # pattern and that really are zip archives
        if zip_regex.search(fname) and os.path.exists(path) and \
                stat.S_ISREG(os.lstat(path)[stat.ST_MODE]) and \
                zipfile.is_zipfile(path):
            z = None
            # TODO ZipFile is context manager in 2.7+
            try:
                z = zipfile.ZipFile(path, 'r')
                badcrc = z.testzip()
                if badcrc:
                    printError(pkg, 'bad-crc-in-zip', badcrc, fname)
            except zipfile.error:
                printWarning(pkg, 'unable-to-read-zip', '%s: %s'
                             % (fname, sys.exc_info()[1]))
            else:
                # archive readable: warn if no member is compressed
                compressed = False
                for zinfo in z.infolist():
                    if zinfo.compress_type != zipfile.ZIP_STORED:
                        compressed = True
                        break
                if not compressed:
                    printWarning(pkg, 'uncompressed-zip', fname)

                # additional jar checks
                if jar_regex.search(fname):
                    try:
                        mf = Pkg.b2s(z.read('META-INF/MANIFEST.MF'))
                        if classpath_regex.search(mf):
                            printWarning(pkg,
                                         'class-path-in-manifest', fname)
                    except KeyError:
                        # META-INF/* are optional:
                        # http://java.sun.com/j2se/1.4/docs/guide/jar/jar.html
                        pass
                    try:
                        # getinfo raises KeyError when the index is absent
                        zinfo = z.getinfo('META-INF/INDEX.LIST')
                        if not want_indexed_jars:
                            printWarning(pkg, 'jar-indexed', fname)
                    except KeyError:
                        if want_indexed_jars:
                            printWarning(pkg, 'jar-not-indexed', fname)
                        pass
            # close regardless of which branch ran; z stays None if
            # ZipFile() itself failed
            z and z.close()
def check_summary(self, pkg, lang, ignored_words):
    """Validate the Summary tag for one language.

    Checks encoding, unexpanded macros, spelling, formatting (single
    line, capitalization, no trailing dot, length, no leading spaces),
    forbidden words, and repetition of the package name.
    """
    summary = pkg.langtag(rpm.RPMTAG_SUMMARY, lang)
    if not Pkg.is_utf8_bytestr(summary):
        self.output.add_info('E', pkg, 'tag-not-utf8', 'Summary', lang)
    summary = byte_to_string(summary)
    self._unexpanded_macros(pkg, 'Summary(%s)' % lang, summary)
    if self.spellcheck:
        pkgname = byte_to_string(pkg.header[rpm.RPMTAG_NAME])
        typos = self.spellchecker.spell_check(summary, 'Summary({})',
                                              lang, pkgname, ignored_words)
        for typo in typos.items():
            self.output.add_info('E', pkg, 'spelling-error', typo)
    if not summary:
        # Fix: an empty summary has nothing more to check; the indexing
        # below (summary[0], summary[-1]) would raise IndexError on ''.
        return
    if '\n' in summary:
        self.output.add_info('E', pkg, 'summary-on-multiple-lines', lang)
    if (summary[0] != summary[0].upper() and
            summary.partition(' ')[0] not in CAPITALIZED_IGNORE_LIST):
        self.output.add_info('W', pkg, 'summary-not-capitalized', lang,
                             summary)
    if summary[-1] == '.':
        self.output.add_info('W', pkg, 'summary-ended-with-dot', lang,
                             summary)
    if len(summary) > self.max_line_len:
        self.output.add_info('E', pkg, 'summary-too-long', lang, summary)
    if leading_space_regex.search(summary):
        self.output.add_info('E', pkg, 'summary-has-leading-spaces', lang,
                             summary)
    res = self.forbidden_words_regex.search(summary)
    if res and self.config.configuration['ForbiddenWords']:
        self.output.add_info('W', pkg, 'summary-use-invalid-word', lang,
                             res.group(1))
    if pkg.name:
        sepchars = r'[\s%s]' % punct
        res = re.search(r'(?:^|\s)(%s)(?:%s|$)'
                        % (re.escape(pkg.name), sepchars),
                        summary, re.IGNORECASE | re.UNICODE)
        if res:
            self.output.add_info('W', pkg, 'name-repeated-in-summary',
                                 lang, res.group(1))
def test_rangeCompare():
    """A '< 1.0' requirement must not be satisfied by an epoch-1
    provide of 0.5."""
    cases = [
        (("foo", rpm.RPMSENSE_LESS, (None, "1.0", None)),
         ("foo", rpm.RPMSENSE_EQUAL, ("1", "0.5", None))),
    ]
    for req, prov in cases:
        assert not Pkg.rangeCompare(req, prov)
def main():
    """rpmlint entry point: check every package/spec named in `args`
    (and every .rpm/.spec under directory arguments), print a summary,
    and exit with a status describing the overall result.

    Exit codes: 0 ok, 2 interrupted, 64 errors printed, 66 badness
    threshold exceeded.
    """
    locale.setlocale(locale.LC_COLLATE, '')
    # Add check dirs to the front of load path
    sys.path[0:0] = Config.checkDirs()
    # Load all checks
    for c in Config.allChecks():
        loadCheck(c)
    packages_checked = 0
    specfiles_checked = 0
    try:
        # Loop over all file names given in arguments
        dirs = []
        for arg in args:
            pkgs = []
            isfile = False
            try:
                if arg == "-":
                    arg = "(standard input)"
                    # Short-circuit stdin spec file check
                    stdin = sys.stdin.readlines()
                    if not stdin:
                        continue
                    with Pkg.FakePkg(arg) as pkg:
                        runSpecChecks(pkg, None, spec_lines=stdin)
                    specfiles_checked += 1
                    continue
                try:
                    st = os.stat(arg)
                    isfile = True
                    if stat.S_ISREG(st[stat.ST_MODE]):
                        if arg.endswith(".spec"):
                            # Short-circuit spec file checks
                            with Pkg.FakePkg(arg) as pkg:
                                runSpecChecks(pkg, arg)
                            specfiles_checked += 1
                        elif "/" in arg or arg.endswith(".rpm") or \
                                arg.endswith(".spm"):
                            pkgs.append(Pkg.Pkg(arg, extract_dir))
                        else:
                            # not a recognizable package file; treat the
                            # argument as an installed-package name below
                            raise OSError
                    elif stat.S_ISDIR(st[stat.ST_MODE]):
                        dirs.append(arg)
                        continue
                    else:
                        raise OSError
                except OSError:
                    # fall back: look the name up among installed packages
                    ipkgs = Pkg.getInstalledPkgs(arg)
                    if not ipkgs:
                        Pkg.warn(
                            '(none): E: no installed packages by name %s'
                            % arg)
                    else:
                        ipkgs.sort(key=lambda x: locale.strxfrm(
                            x.header.sprintf("%{NAME}.%{ARCH}")))
                        pkgs.extend(ipkgs)
            except KeyboardInterrupt:
                if isfile:
                    arg = os.path.abspath(arg)
                Pkg.warn(
                    '(none): E: interrupted, exiting while reading %s' % arg)
                sys.exit(2)
            except Exception as e:
                if isfile:
                    arg = os.path.abspath(arg)
                Pkg.warn('(none): E: error while reading %s: %s' % (arg, e))
                pkgs = []
                continue
            for pkg in pkgs:
                with pkg:
                    runChecks(pkg)
                packages_checked += 1

        # recurse into any directory arguments collected above
        for dname in dirs:
            try:
                for path, _, files in os.walk(dname):
                    for fname in files:
                        fname = os.path.abspath(os.path.join(path, fname))
                        try:
                            if fname.endswith('.rpm') or \
                                    fname.endswith('.spm'):
                                with Pkg.Pkg(fname, extract_dir) as pkg:
                                    runChecks(pkg)
                                packages_checked += 1
                            elif fname.endswith('.spec'):
                                with Pkg.FakePkg(fname) as pkg:
                                    runSpecChecks(pkg, fname)
                                specfiles_checked += 1
                        except KeyboardInterrupt:
                            Pkg.warn(
                                '(none): E: interrupted while reading %s'
                                % fname)
                            sys.exit(2)
                        except Exception as e:
                            Pkg.warn('(none): E: while reading %s: %s'
                                     % (fname, e))
                            continue
            except Exception as e:
                Pkg.warn('(none): E: error while reading dir %s: %s'
                         % (dname, e))
                continue

        if printAllReasons():
            Pkg.warn('(none): E: badness %d exceeds threshold %d, aborting.'
                     % (badnessScore(), badnessThreshold()))
            sys.exit(66)
    finally:
        # always print the summary, even on早 exit paths above
        print("%d packages and %d specfiles checked; %d errors, %d warnings."
              % (packages_checked, specfiles_checked,
                 printed_messages["E"], printed_messages["W"]))

    if printed_messages["E"] > 0:
        sys.exit(64)
    sys.exit(0)
def main():
    """rpmlint entry point (Filter-based output variant): check every
    package/spec named in `args` (and every .rpm/.spec under directory
    arguments), print collected results and a summary, then exit.

    Exit codes: 0 ok, 2 interrupted, 64 errors printed, 66 badness
    threshold exceeded.
    """
    locale.setlocale(locale.LC_COLLATE, '')
    output = Filter(cfg)
    # Load all checks
    for c in cfg.configuration['Checks']:
        loadCheck(c, cfg, output)
    packages_checked = 0
    specfiles_checked = 0
    try:
        # Loop over all file names given in arguments
        dirs = []
        for arg in args:
            pkgs = []
            isfile = False
            try:
                if arg == '-':
                    arg = '(standard input)'
                    # Short-circuit stdin spec file check
                    stdin = sys.stdin.readlines()
                    if not stdin:
                        continue
                    with Pkg.FakePkg(arg) as pkg:
                        runSpecChecks(pkg, None, spec_lines=stdin)
                    specfiles_checked += 1
                    continue
                try:
                    st = os.stat(arg)
                    isfile = True
                    if stat.S_ISREG(st[stat.ST_MODE]):
                        if arg.endswith('.spec'):
                            # Short-circuit spec file checks
                            with Pkg.FakePkg(arg) as pkg:
                                runSpecChecks(pkg, arg)
                            specfiles_checked += 1
                        elif '/' in arg or arg.endswith('.rpm') or \
                                arg.endswith('.spm'):
                            pkgs.append(Pkg.Pkg(arg, extract_dir))
                        else:
                            # not a recognizable package file; treat the
                            # argument as an installed-package name below
                            raise OSError
                    elif stat.S_ISDIR(st[stat.ST_MODE]):
                        dirs.append(arg)
                        continue
                    else:
                        raise OSError
                except OSError:
                    # fall back: look the name up among installed packages
                    ipkgs = Pkg.getInstalledPkgs(arg)
                    if not ipkgs:
                        print_warning(
                            '(none): E: no installed packages by name %s'
                            % arg)
                    else:
                        ipkgs.sort(key=lambda x: locale.strxfrm(
                            x.header.sprintf('%{NAME}.%{ARCH}')))
                        pkgs.extend(ipkgs)
            except KeyboardInterrupt:
                if isfile:
                    arg = os.path.abspath(arg)
                print_warning(
                    '(none): E: interrupted, exiting while reading %s' % arg)
                sys.exit(2)
            except Exception as e:
                if isfile:
                    arg = os.path.abspath(arg)
                print_warning('(none): E: error while reading %s: %s'
                              % (arg, e))
                pkgs = []
                continue
            for pkg in pkgs:
                with pkg:
                    runChecks(pkg)
                packages_checked += 1

        # recurse into any directory arguments collected above
        for dname in dirs:
            try:
                for path, _, files in os.walk(dname):
                    for fname in files:
                        fname = os.path.abspath(os.path.join(path, fname))
                        try:
                            if fname.endswith('.rpm') or \
                                    fname.endswith('.spm'):
                                with Pkg.Pkg(fname, extract_dir) as pkg:
                                    runChecks(pkg)
                                packages_checked += 1
                            elif fname.endswith('.spec'):
                                with Pkg.FakePkg(fname) as pkg:
                                    runSpecChecks(pkg, fname)
                                specfiles_checked += 1
                        except KeyboardInterrupt:
                            print_warning(
                                '(none): E: interrupted while reading %s'
                                % fname)
                            sys.exit(2)
                        except Exception as e:
                            print_warning('(none): E: while reading %s: %s'
                                          % (fname, e))
                            continue
            except Exception as e:
                print_warning('(none): E: error while reading dir %s: %s'
                              % (dname, e))
                continue

        print(output.print_results(output.results))

        if output.badness_threshold > 0 and \
                output.score > output.badness_threshold:
            print_warning(
                '(none): E: badness %d exceeds threshold %d, aborting.'
                % (output.score, output.badness_threshold))
            sys.exit(66)
    finally:
        # always print the summary, even on early exit paths above
        print('%d packages and %d specfiles checked; %d errors, %d warnings.'
              % (packages_checked, specfiles_checked,
                 output.printed_messages['E'], output.printed_messages['W']))

    if output.printed_messages['E'] > 0:
        sys.exit(64)
    sys.exit(0)
############################################################################# # ############################################################################# argv0 = os.path.basename(sys.argv[0]) # parse options try: (opt, args) = getopt.getopt(sys.argv[1:], 'iI:c:C:hVvanE:f:o:', [ 'info', 'explain=', 'check=', 'checkdir=', 'help', 'version', 'verbose', 'all', 'noexception', 'extractdir=', 'file=', 'option=', 'rawout=' ]) except getopt.GetoptError as e: Pkg.warn("%s: %s" % (argv0, e)) usage(argv0) sys.exit(1) # process options checkdir = '/usr/share/rpmlint' checks = [] verbose = False extract_dir = None conf_file = _default_user_conf info_error = set() # load global config files configs = glob.glob('/etc/rpmlint/*config') configs.sort()
def __init__(self, pkg, path, fname, is_ar, is_shlib):
    """Gather information about one ELF object by parsing readelf,
    strings, objdump and ldd output.

    pkg      -- package the file belongs to
    path     -- path of the extracted file on disk
    fname    -- file name as packaged
    is_ar    -- whether the file is an ar archive
    is_shlib -- whether the file is a shared library
    """
    # results filled in below; consumers read these attributes
    self.readelf_error = False
    self.needed = []          # DT_NEEDED entries
    self.rpath = []           # runtime path components
    self.undef = []           # undefined symbols (installed pkgs only)
    self.unused = []          # unused direct dependencies (ldd -u)
    self.comment = False      # has a .comment section
    self.soname = False       # DT_SONAME, or False if none
    self.non_pic = True
    self.stack = False
    self.exec_stack = False
    self.exit_calls = []
    self.forbidden_calls = []
    fork_called = False
    self.tail = ''            # last bytes of the file
    self.setgid = False
    self.setuid = False
    self.setgroups = False
    self.chroot = False
    self.chdir = False
    self.chroot_near_chdir = False
    self.mktemp = False

    self.forbidden_functions = Config.getOption("WarnOnFunction")
    if self.forbidden_functions:
        for name, func in self.forbidden_functions.items():
            # precompile regexps
            f_name = func['f_name']
            func['f_regex'] = create_nonlibc_regexp_call(f_name)
            if 'good_param' in func:
                func['waiver_regex'] = re.compile(func['good_param'])
            # register descriptions
            addDetails(name, func['description'])

    is_debug = path.endswith('.debug')

    # Currently this implementation works only on specific
    # architectures due to reliance on arch specific assembly.
    if (pkg.arch.startswith('armv') or pkg.arch == 'aarch64'):
        # 10450: ebffffec bl 10408 <chroot@plt>
        self.objdump_call_regex = re.compile(br'\sbl\s+(.*)')
    elif (pkg.arch.endswith('86') or pkg.arch == 'x86_64'):
        # 401eb8: e8 c3 f0 ff ff callq 400f80 <chdir@plt>
        self.objdump_call_regex = re.compile(br'callq?\s(.*)')
    else:
        self.objdump_call_regex = None

    res = Pkg.getstatusoutput(
        ('readelf', '-W', '-S', '-l', '-d', '-s', path))
    if not res[0]:
        lines = res[1].splitlines()
        # first pass: section/dynamic info, stop at the symbol table
        for line in lines:
            r = self.needed_regex.search(line)
            if r:
                self.needed.append(r.group(1))
                continue
            r = self.rpath_regex.search(line)
            if r:
                for p in r.group(1).split(':'):
                    self.rpath.append(p)
                continue
            if self.comment_regex.search(line):
                self.comment = True
                continue
            if self.pic_regex.search(line):
                self.non_pic = False
                continue
            r = self.soname_regex.search(line)
            if r:
                self.soname = r.group(1)
                continue
            r = self.stack_regex.search(line)
            if r:
                self.stack = True
                flags = r.group(1)
                if flags and self.stack_exec_regex.search(flags):
                    self.exec_stack = True
                continue
            if line.startswith("Symbol table"):
                break
        # second pass: scan referenced functions for interesting calls
        for line in lines:
            r = self.call_regex.search(line)
            if not r:
                continue
            line = r.group(1)
            if self.mktemp_call_regex.search(line):
                self.mktemp = True
            if self.setgid_call_regex.search(line):
                self.setgid = True
            if self.setuid_call_regex.search(line):
                self.setuid = True
            if self.setgroups_call_regex.search(line):
                self.setgroups = True
            if self.chdir_call_regex.search(line):
                self.chdir = True
            if self.chroot_call_regex.search(line):
                self.chroot = True
            if self.forbidden_functions:
                for r_name, func in self.forbidden_functions.items():
                    ret = func['f_regex'].search(line)
                    if ret:
                        self.forbidden_calls.append(r_name)
            if is_shlib:
                r = self.exit_call_regex.search(line)
                if r:
                    self.exit_calls.append(r.group(1))
                    continue
                r = self.fork_call_regex.search(line)
                if r:
                    fork_called = True
                    continue

        # check if we don't have a string that will automatically
        # waive the presence of a forbidden call
        if self.forbidden_calls:
            res = Pkg.getstatusoutput(('strings', path))
            if not res[0]:
                for line in res[1].splitlines():
                    # as we need to remove elements, iterate backwards
                    for i in range(len(self.forbidden_calls) - 1, -1, -1):
                        func = self.forbidden_calls[i]
                        f = self.forbidden_functions[func]
                        if 'waiver_regex' not in f:
                            continue
                        r = f['waiver_regex'].search(line)
                        if r:
                            del self.forbidden_calls[i]

        # NOTE(review): res here may be the strings(1) output if the
        # branch above ran; the TEXTREL test below then scans that
        # output rather than the readelf output — confirm intended.
        if self.non_pic:
            self.non_pic = 'TEXTREL' in res[1]

        # Ignore all exit() calls if fork() is being called.
        # Does not have any context at all but without this kludge, the
        # number of false positives would probably be intolerable.
        if fork_called:
            self.exit_calls = []

        # check if chroot is near chdir (since otherwise, chroot is called
        # without chdir)
        if not self.objdump_call_regex and self.chroot and self.chdir:
            # On some architectures, e.g. PPC, it is to difficult to
            # find the actual invocations of chroot/chdir, if both
            # exist assume chroot is fine
            self.chroot_near_chdir = True
        elif self.chroot and self.chdir:
            # disassemble and look for chroot@plt within 2 call sites
            # of chdir@plt
            p = subprocess.Popen(('objdump', '-d', path),
                                 stdout=subprocess.PIPE, bufsize=-1,
                                 env=dict(os.environ, LC_ALL="C"))
            with p.stdout:
                index = 0
                chroot_index = -99
                chdir_index = -99
                for line in p.stdout:
                    res = self.objdump_call_regex.search(line)
                    if not res:
                        continue
                    if b'@plt' not in res.group(1):
                        pass
                    elif b'chroot@plt' in res.group(1):
                        chroot_index = index
                        if abs(chroot_index - chdir_index) <= 2:
                            self.chroot_near_chdir = True
                            break
                    elif b'chdir@plt' in res.group(1):
                        chdir_index = index
                        if abs(chroot_index - chdir_index) <= 2:
                            self.chroot_near_chdir = True
                            break
                    index += 1
            if p.wait() and not self.chroot_near_chdir:
                printWarning(pkg, 'binaryinfo-objdump-failed', fname)
                # avoid false positive
                self.chroot_near_chdir = True
            elif chroot_index == -99 and chdir_index == -99:
                # avoid false positive
                self.chroot_near_chdir = True
    else:
        self.readelf_error = True
        # Go and others are producing ar archives that don't have ELF
        # headers, so don't complain about it
        if not is_ar:
            printWarning(pkg, 'binaryinfo-readelf-failed', fname,
                         re.sub('\n.*', '', res[1]))

    # remember the file's last 12 bytes (used by later checks)
    try:
        with open(path, 'rb') as fobj:
            fobj.seek(-12, os.SEEK_END)
            self.tail = Pkg.b2s(fobj.read())
    except Exception as e:
        printWarning(pkg, 'binaryinfo-tail-failed %s: %s' % (fname, e))

    # Undefined symbol and unused direct dependency checks make sense only
    # for installed packages.
    # skip debuginfo: https://bugzilla.redhat.com/190599
    if not is_ar and not is_debug and isinstance(pkg, Pkg.InstalledPkg):
        # We could do this with objdump, but it's _much_ simpler with ldd.
        res = Pkg.getstatusoutput(('ldd', '-d', '-r', path))
        if not res[0]:
            for line in res[1].splitlines():
                undef = self.undef_regex.search(line)
                if undef:
                    self.undef.append(undef.group(1))
            if self.undef:
                # demangle C++ symbol names for nicer messages
                try:
                    res = Pkg.getstatusoutput(['c++filt'] + self.undef)
                    if not res[0]:
                        self.undef = res[1].splitlines()
                except OSError:
                    pass
        else:
            printWarning(pkg, 'ldd-failed', fname)
        res = Pkg.getstatusoutput(('ldd', '-r', '-u', path))
        if res[0]:
            # Either ldd doesn't grok -u (added in glibc 2.3.4) or we have
            # unused direct dependencies
            in_unused = False
            for line in res[1].splitlines():
                if not line.rstrip():
                    pass
                elif line.startswith('Unused direct dependencies'):
                    in_unused = True
                elif in_unused:
                    unused = self.unused_regex.search(line)
                    if unused:
                        self.unused.append(unused.group(1))
                    else:
                        in_unused = False
def __init__(self, old, new, ignore=None):
    """Compute the difference between two packages.

    old, new -- package file paths or installed package names
    ignore   -- iterable of one-letter file-attribute codes to skip
                when comparing files (matched against __FILEIDX)
    """
    self.result = []
    self.ignore = ignore
    if self.ignore is None:
        self.ignore = []

    FILEIDX = self.__FILEIDX
    # disable the file-index columns the caller asked to ignore
    for tag in self.ignore:
        for entry in FILEIDX:
            if tag == entry[0]:
                entry[1] = None
                break

    try:
        old = self.__load_pkg(old).header
        new = self.__load_pkg(new).header
    except KeyError as e:
        Pkg.warn(str(e))
        sys.exit(2)

    # Compare single tags
    for tag in self.TAGS:
        old_tag = old[tag]
        new_tag = new[tag]
        if old_tag != new_tag:
            tagname = rpm.tagnames[tag]
            if old_tag is None:
                self.__add(self.FORMAT, (self.ADDED, tagname))
            elif new_tag is None:
                self.__add(self.FORMAT, (self.REMOVED, tagname))
            else:
                self.__add(self.FORMAT, ('S.5.....', tagname))

    # compare Provides, Requires, ...
    for tag in self.PRCO:
        self.__comparePRCOs(old, new, tag)

    # compare the files
    old_files_dict = self.__fileIteratorToDict(old.fiFromHeader())
    new_files_dict = self.__fileIteratorToDict(new.fiFromHeader())
    files = list(
        set(itertools.chain(iter(old_files_dict), iter(new_files_dict))))
    files.sort()

    for f in files:
        diff = False
        old_file = old_files_dict.get(f)
        new_file = new_files_dict.get(f)
        if not old_file:
            self.__add(self.FORMAT, (self.ADDED, f))
        elif not new_file:
            self.__add(self.FORMAT, (self.REMOVED, f))
        else:
            # build an rpm -V style column string; '.' means unchanged
            format = ''
            for entry in FILEIDX:
                if entry[1] is not None and \
                        old_file[entry[1]] != new_file[entry[1]]:
                    format = format + entry[0]
                    diff = True
                else:
                    format = format + '.'
            if diff:
                self.__add(self.FORMAT, (format, f))
def check_aux(self, pkg, files, prog, script, tag, prereq):
    """Run the common scriptlet checks for one scriptlet.

    pkg    -- package being checked
    files  -- packaged file names
    prog   -- scriptlet interpreter (e.g. '/bin/sh'); may be empty
    script -- raw scriptlet body (bytes); may be empty
    tag    -- scriptlet tag used to build message ids (e.g. 'post')
    prereq -- presumably the names the scriptlet may rely on at run
              time — TODO confirm against caller
    """
    if script:
        script_str = Pkg.b2s(script)
        if prog:
            if prog not in valid_shells:
                printError(pkg, 'invalid-shell-in-' + tag, prog)
            if prog in empty_shells:
                printError(pkg, 'non-empty-' + tag, prog)
        # checks shared by shell and perl scriptlets
        if prog in syntaxcheck_shells or prog == '/usr/bin/perl':
            if percent_regex.search(script_str):
                printWarning(pkg, 'percent-in-' + tag)
            if bracket_regex.search(script_str):
                printWarning(pkg, 'spurious-bracket-in-' + tag)
            res = dangerous_command_regex.search(script_str)
            if res:
                printWarning(pkg, 'dangerous-command-in-' + tag,
                             res.group(2))
            res = selinux_regex.search(script_str)
            if res:
                printError(pkg, 'forbidden-selinux-command-in-' + tag,
                           res.group(2))
            if 'update-menus' in script_str:
                # update-menus is pointless without a packaged menu file
                menu_error = True
                for f in files:
                    if menu_regex.search(f):
                        menu_error = False
                        break
                if menu_error:
                    printError(pkg,
                               'update-menus-without-menu-file-in-' + tag)
            if tmp_regex.search(script_str):
                printError(pkg, 'use-tmp-in-' + tag)
            # commands that imply a dependency the package must declare
            for c in prereq_assoc:
                if c[0].search(script_str):
                    found = False
                    for p in c[1]:
                        if p in prereq or p in files:
                            found = True
                            break
                    if not found:
                        printError(pkg, 'no-prereq-on', c[1][0])
        # shell-only checks
        if prog in syntaxcheck_shells:
            if incorrect_shell_script(prog, script):
                printError(pkg, 'shell-syntax-error-in-' + tag)
            if home_regex.search(script_str):
                printError(pkg, 'use-of-home-in-' + tag)
            res = bogus_var_regex.search(script_str)
            if res:
                printWarning(pkg, 'bogus-variable-use-in-' + tag,
                             res.group(1))
        if prog == '/usr/bin/perl':
            if incorrect_perl_script(prog, script):
                printError(pkg, 'perl-syntax-error-in-' + tag)
        elif prog.endswith('sh'):
            res = single_command_regex.search(script_str)
            if res:
                printWarning(pkg, 'one-line-command-in-' + tag,
                             res.group(1))
    # no script body but an interpreter declared: empty scriptlet
    elif prog not in empty_shells and prog in valid_shells:
        printWarning(pkg, 'empty-' + tag)
def check(self, pkg):
    """Run all tag-level checks on a (binary or source) package header.

    Validates Packager/Version/Release/Epoch, dependencies, Name,
    Summary/Description, Group, BuildHost, changelog, License, URLs,
    Provides/Obsoletes consistency, the package filename, and private
    shared-object provides.  All findings are reported via
    printError/printWarning.
    """
    packager = pkg[rpm.RPMTAG_PACKAGER]
    if packager:
        self._unexpanded_macros(pkg, 'Packager', packager)
        if Config.getOption('Packager') and \
                not packager_regex.search(packager):
            printWarning(pkg, 'invalid-packager', packager)
    else:
        printError(pkg, 'no-packager-tag')

    version = pkg[rpm.RPMTAG_VERSION]
    if version:
        self._unexpanded_macros(pkg, 'Version', version)
        res = invalid_version_regex.search(version)
        if res:
            printError(pkg, 'invalid-version', version)
    else:
        printError(pkg, 'no-version-tag')

    release = pkg[rpm.RPMTAG_RELEASE]
    if release:
        self._unexpanded_macros(pkg, 'Release', release)
        if release_ext and not extension_regex.search(release):
            printWarning(pkg, 'not-standard-release-extension', release)
    else:
        printError(pkg, 'no-release-tag')

    epoch = pkg[rpm.RPMTAG_EPOCH]
    if epoch is None:
        if use_epoch:
            printError(pkg, 'no-epoch-tag')
    else:
        if epoch > 99:
            printWarning(pkg, 'unreasonable-epoch', epoch)
        # From here on epoch is a string (for version comparisons below).
        epoch = str(epoch)

    if use_epoch:
        # Dependency tuples are (name, flags, (epoch, version, release)):
        # flag a versioned entry whose epoch is missing.
        for tag in ("obsoletes", "conflicts", "provides", "recommends",
                    "suggests", "enhances", "supplements"):
            for x in (x for x in getattr(pkg, tag)()
                      if x[1] and x[2][0] is None):
                printWarning(pkg, 'no-epoch-in-%s' % tag,
                             Pkg.formatRequire(*x))

    name = pkg.name
    deps = pkg.requires() + pkg.prereq()
    devel_depend = False
    is_devel = FilesCheck.devel_regex.search(name)
    is_source = pkg.isSource()
    for d in deps:
        value = Pkg.formatRequire(*d)
        if use_epoch and d[1] and d[2][0] is None and \
                not d[0].startswith('rpmlib('):
            printWarning(pkg, 'no-epoch-in-dependency', value)
        for r in INVALID_REQUIRES:
            if r.search(d[0]):
                printError(pkg, 'invalid-dependency', d[0])

        if d[0].startswith('/usr/local/'):
            printError(pkg, 'invalid-dependency', d[0])

        if is_source:
            if lib_devel_number_regex.search(d[0]):
                printError(pkg, 'invalid-build-requires', d[0])
        elif not is_devel:
            # Non-devel packages should not depend on devel or bare lib
            # packages.  devel_depend caps the warning at one occurrence.
            if not devel_depend and FilesCheck.devel_regex.search(d[0]):
                printError(pkg, 'devel-dependency', d[0])
                devel_depend = True
            if not d[1]:
                res = lib_package_regex.search(d[0])
                if res and not res.group(1):
                    printError(pkg, 'explicit-lib-dependency', d[0])

        if d[1] == rpm.RPMSENSE_EQUAL and d[2][2] is not None:
            printWarning(pkg, 'requires-on-release', value)
        self._unexpanded_macros(pkg, 'dependency %s' % (value, ), value)

    self._unexpanded_macros(pkg, 'Name', name)
    if not name:
        printError(pkg, 'no-name-tag')
    else:
        if is_devel and not is_source:
            # foo-devel shipping .so symlinks should depend on the exact
            # version of foo (or foo-libs / libfoo).
            base = is_devel.group(1)
            dep = None
            has_so = False
            for fname in pkg.files():
                if fname.endswith('.so'):
                    has_so = True
                    break
            if has_so:
                base_or_libs = base + '/' + base + '-libs/lib' + base
                # try to match *%_isa as well (e.g. "(x86-64)", "(x86-32)")
                base_or_libs_re = re.compile(
                    r'^(lib)?%s(-libs)?(\(\w+-\d+\))?$' % re.escape(base))
                for d in deps:
                    if base_or_libs_re.match(d[0]):
                        dep = d
                        break
                if not dep:
                    printWarning(pkg, 'no-dependency-on', base_or_libs)
                elif version:
                    exp = (epoch, version, None)
                    sexp = Pkg.versionToString(exp)
                    if not dep[1]:
                        printWarning(pkg, 'no-version-dependency-on',
                                     base_or_libs, sexp)
                    elif dep[2][:2] != exp[:2]:
                        printWarning(
                            pkg, 'incoherent-version-dependency-on',
                            base_or_libs,
                            Pkg.versionToString((dep[2][0], dep[2][1],
                                                 None)), sexp)
            res = devel_number_regex.search(name)
            if not res:
                printWarning(pkg, 'no-major-in-name', name)
            else:
                if res.group(3):
                    prov = res.group(1) + res.group(2) + '-devel'
                else:
                    prov = res.group(1) + '-devel'
                if prov not in (x[0] for x in pkg.provides()):
                    printWarning(pkg, 'no-provides', prov)

    # List of words to ignore in spell check
    ignored_words = set()
    for pf in pkg.files():
        ignored_words.update(pf.split('/'))
    ignored_words.update((x[0] for x in pkg.provides()))
    ignored_words.update((x[0] for x in pkg.requires()))
    ignored_words.update((x[0] for x in pkg.conflicts()))
    ignored_words.update((x[0] for x in pkg.obsoletes()))

    langs = pkg[rpm.RPMTAG_HEADERI18NTABLE]

    summary = pkg[rpm.RPMTAG_SUMMARY]
    if summary:
        if not langs:
            self._unexpanded_macros(pkg, 'Summary', Pkg.b2s(summary))
        else:
            for lang in langs:
                self.check_summary(pkg, lang, ignored_words)
    else:
        printError(pkg, 'no-summary-tag')

    description = pkg[rpm.RPMTAG_DESCRIPTION]
    if description:
        if not langs:
            self._unexpanded_macros(pkg, '%description',
                                    Pkg.b2s(description))
        else:
            for lang in langs:
                self.check_description(pkg, lang, ignored_words)

        if len(Pkg.b2s(description)) < len(pkg[rpm.RPMTAG_SUMMARY]):
            printWarning(pkg, 'description-shorter-than-summary')
    else:
        printError(pkg, 'no-description-tag')

    group = pkg[rpm.RPMTAG_GROUP]
    self._unexpanded_macros(pkg, 'Group', group)
    if not group:
        printError(pkg, 'no-group-tag')
    elif VALID_GROUPS and group not in VALID_GROUPS:
        printWarning(pkg, 'non-standard-group', group)

    buildhost = pkg[rpm.RPMTAG_BUILDHOST]
    self._unexpanded_macros(pkg, 'BuildHost', buildhost)
    if not buildhost:
        printError(pkg, 'no-buildhost-tag')
    elif Config.getOption('ValidBuildHost') and \
            not valid_buildhost_regex.search(buildhost):
        printWarning(pkg, 'invalid-buildhost', buildhost)

    changelog = pkg[rpm.RPMTAG_CHANGELOGNAME]
    if not changelog:
        printError(pkg, 'no-changelogname-tag')
    else:
        clt = pkg[rpm.RPMTAG_CHANGELOGTEXT]
        if use_version_in_changelog:
            ret = changelog_version_regex.search(Pkg.b2s(changelog[0]))
            if not ret and clt:
                # we also allow the version specified as the first
                # thing on the first line of the text
                ret = changelog_text_version_regex.search(Pkg.b2s(clt[0]))
            if not ret:
                printWarning(pkg, 'no-version-in-last-changelog')
            elif version and release:
                srpm = pkg[rpm.RPMTAG_SOURCERPM] or ''
                # only check when source name correspond to name
                if srpm[0:-8] == '%s-%s-%s' % (name, version, release):
                    expected = [version + '-' + release]
                    if epoch is not None:  # regardless of use_epoch
                        expected[0] = str(epoch) + ':' + expected[0]
                    # Allow EVR in changelog without release extension,
                    # the extension is often a macro or otherwise dynamic.
                    if release_ext:
                        expected.append(
                            extension_regex.sub('', expected[0]))
                    if ret.group(1) not in expected:
                        if len(expected) == 1:
                            expected = expected[0]
                        printWarning(pkg,
                                     'incoherent-version-in-changelog',
                                     ret.group(1), expected)

        if use_utf8:
            if clt:
                changelog = changelog + clt
            for s in changelog:
                if not Pkg.is_utf8_bytestr(s):
                    printError(pkg, 'tag-not-utf8', '%changelog')
                    break

        clt = pkg[rpm.RPMTAG_CHANGELOGTIME][0]
        if clt:
            clt -= clt % (24 * 3600)  # roll back to 00:00:00, see #246
            if clt < oldest_changelog_timestamp:
                printWarning(pkg, 'changelog-time-overflow',
                             time.strftime("%Y-%m-%d", time.gmtime(clt)))
            elif clt > time.time():
                printError(pkg, 'changelog-time-in-future',
                           time.strftime("%Y-%m-%d", time.gmtime(clt)))

    # for provide_name in (x[0] for x in pkg.provides()):
    #     if name == provide_name:
    #         printWarning(pkg, 'package-provides-itself')
    #         break

    def split_license(license):
        # Split a License string into individual license tokens.
        return (x.strip() for x in
                (l for l in license_regex.split(license) if l))

    rpm_license = pkg[rpm.RPMTAG_LICENSE]
    if not rpm_license:
        printError(pkg, 'no-license')
    else:
        valid_license = True
        if rpm_license not in VALID_LICENSES:
            for l1 in split_license(rpm_license):
                if l1 in VALID_LICENSES:
                    continue
                for l2 in split_license(l1):
                    if l2 not in VALID_LICENSES:
                        printWarning(pkg, 'invalid-license', l2)
                        valid_license = False
        if not valid_license:
            self._unexpanded_macros(pkg, 'License', rpm_license)

    for tag in ('URL', 'DistURL', 'BugURL'):
        if hasattr(rpm, 'RPMTAG_%s' % tag.upper()):
            url = Pkg.b2s(pkg[getattr(rpm, 'RPMTAG_%s' % tag.upper())])
            self._unexpanded_macros(pkg, tag, url, is_url=True)
            if url:
                (scheme, netloc) = urlparse(url)[0:2]
                if not scheme or not netloc or "." not in netloc or \
                        scheme not in ('http', 'https', 'ftp') or \
                        (Config.getOption('InvalidURL') and
                         invalid_url_regex.search(url)):
                    printWarning(pkg, 'invalid-url', tag, url)
                else:
                    self.check_url(pkg, tag, url)
            elif tag == 'URL':
                printWarning(pkg, 'no-url-tag')

    obs_names = [x[0] for x in pkg.obsoletes()]
    prov_names = [x[0] for x in pkg.provides()]

    for o in (x for x in obs_names if x not in prov_names):
        printWarning(pkg, 'obsolete-not-provided', o)
    for o in pkg.obsoletes():
        value = Pkg.formatRequire(*o)
        self._unexpanded_macros(pkg, 'Obsoletes %s' % (value, ), value)

    # TODO: should take versions, <, <=, =, >=, > into account here
    #       https://bugzilla.redhat.com/460872
    useless_provides = set()
    for p in prov_names:
        if (prov_names.count(p) != 1 and
                not p.startswith('debuginfo(') and
                p not in useless_provides):
            useless_provides.add(p)
    for p in sorted(useless_provides):
        printError(pkg, 'useless-provides', p)

    for tagname, items in (('Provides', pkg.provides()),
                           ('Conflicts', pkg.conflicts()),
                           ('Obsoletes', pkg.obsoletes()),
                           ('Supplements', pkg.supplements()),
                           ('Suggests', pkg.suggests()),
                           ('Enhances', pkg.enhances()),
                           ('Recommends', pkg.recommends())):
        for p in items:
            value = Pkg.formatRequire(*p)
            self._unexpanded_macros(pkg, '%s %s' % (tagname, value), value)

    # A package obsoleting something it also provides obsoletes itself.
    obss = pkg.obsoletes()
    if obss:
        provs = pkg.provides()
        for prov in provs:
            for obs in obss:
                if Pkg.rangeCompare(obs, prov):
                    printWarning(pkg, 'self-obsoletion',
                                 '%s obsoletes %s' %
                                 (Pkg.formatRequire(*obs),
                                  Pkg.formatRequire(*prov)))

    # Compare the actual filename with what %_build_name_fmt says it
    # should be.
    expfmt = rpm.expandMacro("%{_build_name_fmt}")
    if pkg.isSource():
        # _build_name_fmt often (always?) ends up not outputting src/nosrc
        # as arch for source packages, do it ourselves
        expfmt = re.sub(r'(?i)%\{?ARCH\b\}?', pkg.arch, expfmt)
    expected = pkg.header.sprintf(expfmt).split("/")[-1]
    basename = os.path.basename(pkg.filename)
    if basename != expected:
        printWarning(pkg, 'non-coherent-filename', basename, expected)

    for tag in ('Distribution', 'DistTag', 'ExcludeArch', 'ExcludeOS',
                'Vendor'):
        if hasattr(rpm, 'RPMTAG_%s' % tag.upper()):
            res = Pkg.b2s(pkg[getattr(rpm, 'RPMTAG_%s' % tag.upper())])
            self._unexpanded_macros(pkg, tag, res)

    # Shared objects in private directories should not be exported as
    # automatic Provides.
    for path in private_so_paths:
        for fname, pkgfile in pkg.files().items():
            if fname.startswith(path):
                for prov in pkgfile.provides:
                    if so_dep_regex.search(prov[0]):
                        printWarning(pkg, "private-shared-object-provides",
                                     fname, Pkg.formatRequire(*prov))
def spell_check(pkg, str, fmt, lang, ignored):
    """Spell check a tag value, preferring enchant, else the BAD_WORDS map.

    pkg     -- package the diagnostics are attributed to
    str     -- the text to check (NOTE(review): parameter shadows the
               builtin 'str'; kept for interface compatibility)
    fmt     -- format string with one %s slot for the language,
               e.g. 'Summary(%s)'
    lang    -- tag language code; 'C' is treated as en_US for enchant
    ignored -- words to skip (file path components, capability names, ...)

    Each misspelled word is warned about at most once per call.
    """
    dict_found = True
    warned = set()
    if enchant:
        if lang == 'C':
            lang = 'en_US'
        # Checkers are cached per language; a cached None means we already
        # know there is no dictionary for this language.
        checker = _enchant_checkers.get(lang)
        if not checker and lang not in _enchant_checkers:
            try:
                checker = enchant.checker.SpellChecker(
                    lang,
                    filters=[enchant.tokenize.EmailFilter,
                             enchant.tokenize.URLFilter,
                             enchant.tokenize.WikiWordFilter])
            except enchant.DictNotFoundError:
                printInfo(pkg, 'enchant-dictionary-not-found', lang)
                pass
            _enchant_checkers[lang] = checker
        if checker:
            # squeeze whitespace to ease leading context check
            checker.set_text(re.sub(r'\s+', ' ', str))
            if use_utf8:
                uppername = Pkg.to_unicode(
                    pkg.header[rpm.RPMTAG_NAME]).upper()
            else:
                uppername = pkg.name.upper()
            upperparts = uppername.split('-')
            if lang.startswith('en'):
                # Also accept possessive forms of name components.
                ups = [x + "'S" for x in upperparts]
                upperparts.extend(ups)
            for err in checker:
                # Skip already warned and ignored words
                if err.word in warned or err.word in ignored:
                    continue

                # Skip all capitalized words that do not start a sentence
                if err.word[0].isupper() and not \
                        sentence_break_regex.search(
                            checker.leading_context(3)):
                    continue

                upperword = err.word.upper()

                # Skip all uppercase words
                if err.word == upperword:
                    continue

                # Skip errors containing package name or equal to a
                # "component" of it, case insensitively
                if uppername in upperword or upperword in upperparts:
                    continue

                # Work around enchant's digit tokenizing behavior:
                # http://github.com/rfk/pyenchant/issues/issue/3
                if checker.leading_context(1).isdigit() or \
                        checker.trailing_context(1).isdigit():
                    continue

                # Warn and suggest
                sug = ', '.join(checker.suggest()[:3])
                if sug:
                    sug = '-> %s' % sug

                printWarning(pkg, 'spelling-error', fmt % lang, err.word,
                             sug)
                warned.add(err.word)
        else:
            dict_found = False

    if not enchant or not dict_found:
        # Fallback: simple lookup of known misspellings in BAD_WORDS.
        for seq in str.split():
            for word in re.split(r'[^a-z]+', seq.lower()):
                if len(word) == 0:
                    continue
                correct = BAD_WORDS.get(word)
                if not correct:
                    continue
                if word[0] == '\'':
                    word = word[1:]
                if word[-1] == '\'':
                    word = word[:-1]
                if word in warned or word in ignored:
                    continue
                printWarning(pkg, 'spelling-error', fmt % lang, word, '->',
                             correct)
                warned.add(word)
'vicefersa': 'vice-versa', 'yur': 'your', 'wheter': 'whether', 'wierd': 'weird', 'xwindows': 'X' } CAPITALIZED_IGNORE_LIST = ('jQuery', 'openSUSE', 'wxWidgets', 'a', 'an', 'uWSGI') DEFAULT_INVALID_REQUIRES = ('^is$', '^not$', '^owned$', '^by$', '^any$', '^package$', r'^libsafe\.so\.') VALID_GROUPS = Config.getOption('ValidGroups', None) if VALID_GROUPS is None: # get defaults from rpm package only if it's not set VALID_GROUPS = Pkg.get_default_valid_rpmgroups() VALID_LICENSES = Config.getOption('ValidLicenses', DEFAULT_VALID_LICENSES) INVALID_REQUIRES = map( re.compile, Config.getOption('InvalidRequires', DEFAULT_INVALID_REQUIRES)) packager_regex = re.compile(Config.getOption('Packager')) changelog_version_regex = re.compile(r'[^>]([^ >]+)\s*$') changelog_text_version_regex = re.compile(r'^\s*-\s*((\d+:)?[\w\.]+-[\w\.]+)') release_ext = Config.getOption('ReleaseExtension') extension_regex = release_ext and re.compile(release_ext) use_version_in_changelog = Config.getOption('UseVersionInChangelog', True) devel_number_regex = re.compile(r'(.*?)([0-9.]+)(_[0-9.]+)?-devel') lib_devel_number_regex = re.compile(r'^lib(.*?)([0-9.]+)(_[0-9.]+)?-devel') invalid_url_regex = re.compile(Config.getOption('InvalidURL'), re.IGNORECASE) lib_package_regex = re.compile( r'(?:^(?:compat-)?lib.*?(\.so.*)?|libs?[\d-]*)$', re.IGNORECASE) leading_space_regex = re.compile(r'^\s+')
def check_spec(self, pkg, spec_file, spec_lines=None):
    """Check an rpm spec file, line by line and (if parseable) parsed.

    pkg        -- package (or Pkg.FakePkg when checking a bare spec file)
    spec_file  -- path to the spec file
    spec_lines -- optional pre-read spec lines; read from spec_file if
                  not given

    First pass: scan every line, tracking the current %section, %if depth,
    declared vs applied patches etc., and report style/content problems.
    Second pass (real spec files only): parse the spec with rpm to check
    Source/Patch URLs with macros expanded.
    """
    self._spec_file = spec_file
    spec_only = isinstance(pkg, Pkg.FakePkg)
    if not spec_lines:
        spec_lines = Pkg.readlines(spec_file)

    # --- state gathered during the line scan ---
    patches = {}                  # patch number -> patch file name
    applied_patches = []          # numbers of patches that get applied
    applied_patches_ifarch = []   # ... applied only inside %ifarch
    patches_auto_applied = False  # %autosetup/%autopatch seen
    source_dir = False
    buildroot = False
    configure_linenum = None      # line where a ./configure call started
    configure_cmdline = ''
    mklibname = False
    is_lib_pkg = False
    if_depth = 0
    ifarch_depth = -1
    current_section = 'package'
    buildroot_clean = {'clean': False, 'install': False}
    depscript_override = False
    depgen_disabled = False
    patch_fuzz_override = False
    indent_spaces = 0             # first line numbers using each style
    indent_tabs = 0
    section = {}                  # section name -> occurrence count
    # None == main package
    current_package = None
    package_noarch = {}

    if self._spec_file:
        if not Pkg.is_utf8(self._spec_file):
            self.output.add_info('E', pkg, 'non-utf8-spec-file',
                                 self._spec_name or self._spec_file)

    # gather info from spec lines

    pkg.current_linenum = 0

    nbsp = UNICODE_NBSP

    for line in spec_lines:

        pkg.current_linenum += 1

        char = line.find(nbsp)
        if char != -1:
            self.output.add_info(
                'W', pkg, 'non-break-space',
                'line %s, char %d' % (pkg.current_linenum, char))

        # Detect section changes (%package, %files, %prep, ...) and, for
        # %package/%files, which subpackage the section belongs to.
        section_marker = False
        for sec, regex in section_regexs.items():
            res = regex.search(line)
            if res:
                current_section = sec
                section_marker = True
                section[sec] = section.get(sec, 0) + 1
                if sec in ('package', 'files'):
                    rest = filelist_regex.sub('', line[res.end() - 1:])
                    res = pkgname_regex.search(rest)
                    if res:
                        current_package = res.group(1)
                    else:
                        current_package = None
                break

        if section_marker:

            if not is_lib_pkg and lib_package_regex.search(line):
                is_lib_pkg = True

            continue

        if (current_section in Pkg.RPM_SCRIPTLETS + ('prep', 'build') and
                contains_buildroot(line)):
            self.output.add_info('W', pkg, 'rpm-buildroot-usage',
                                 '%' + current_section,
                                 line[:-1].strip())

        if make_check_regex.search(line) and current_section not in \
                ('check', 'changelog', 'package', 'description'):
            self.output.add_info('W', pkg,
                                 'make-check-outside-check-section',
                                 line[:-1])

        if current_section in buildroot_clean and \
                not buildroot_clean[current_section] and \
                contains_buildroot(line) and rm_regex.search(line):
            buildroot_clean[current_section] = True

        # Track %if/%ifarch nesting so patches applied only under
        # %ifarch can be reported separately.
        if ifarch_regex.search(line):
            if_depth = if_depth + 1
            ifarch_depth = if_depth

        if if_regex.search(line):
            if_depth = if_depth + 1

        if setup_regex.match(line):
            if not setup_q_regex.search(line):
                # Don't warn if there's a -T without -a or -b
                if setup_t_regex.search(line):
                    if setup_ab_regex.search(line):
                        self.output.add_info('W', pkg, 'setup-not-quiet')
                else:
                    self.output.add_info('W', pkg, 'setup-not-quiet')
            if current_section != 'prep':
                self.output.add_info('W', pkg, 'setup-not-in-prep')
        elif autopatch_regex.search(line):
            patches_auto_applied = True
            if current_section != 'prep':
                self.output.add_info('W', pkg, '%autopatch-not-in-prep')
        else:
            res = autosetup_regex.search(line)
            if res:
                # %autosetup -N does not apply patches automatically.
                if not autosetup_n_regex.search(res.group(1)):
                    patches_auto_applied = True
                if current_section != 'prep':
                    self.output.add_info('W', pkg,
                                         '%autosetup-not-in-prep')

        if endif_regex.search(line):
            if ifarch_depth == if_depth:
                ifarch_depth = -1
            if_depth = if_depth - 1

        # Three ways a patch can be applied: %patch, piping into patch(1),
        # and "patch -i foo.patch".
        res = applied_patch_regex.search(line)
        if res:
            pnum = res.group(1) or 0
            for tmp in applied_patch_p_regex.findall(line) or [pnum]:
                pnum = int(tmp)
                applied_patches.append(pnum)
                if ifarch_depth > 0:
                    applied_patches_ifarch.append(pnum)
        else:
            res = applied_patch_pipe_regex.search(line)
            if res:
                pnum = int(res.group(1))
                applied_patches.append(pnum)
                if ifarch_depth > 0:
                    applied_patches_ifarch.append(pnum)
            else:
                res = applied_patch_i_regex.search(line)
                if res:
                    pnum = int(res.group(1))
                    applied_patches.append(pnum)
                    if ifarch_depth > 0:
                        applied_patches_ifarch.append(pnum)
        if not res and not source_dir:
            res = source_dir_regex.search(line)
            if res:
                source_dir = True
                self.output.add_info('E', pkg, 'use-of-RPM_SOURCE_DIR')

        # A pending ./configure command line may continue over several
        # backslash-continued lines; check it once it is complete.
        if configure_linenum:
            if configure_cmdline[-1] == '\\':
                configure_cmdline = configure_cmdline[:-1] + line.strip()
            else:
                res = configure_libdir_spec_regex.search(
                    configure_cmdline)
                if not res:
                    # Hack to get the correct (start of ./configure) line
                    # number displayed:
                    real_linenum = pkg.current_linenum
                    pkg.current_linenum = configure_linenum
                    self.output.add_info('W', pkg,
                                         'configure-without-libdir-spec')
                    pkg.current_linenum = real_linenum
                elif res.group(1):
                    res = re.match(hardcoded_library_paths, res.group(1))
                    if res:
                        self.output.add_info('E', pkg,
                                             'hardcoded-library-path',
                                             res.group(1),
                                             'in configure options')
                configure_linenum = None

        hashPos = line.find('#')

        if current_section != 'changelog':
            cfgPos = line.find('./configure')
            if cfgPos != -1 and (hashPos == -1 or hashPos > cfgPos):
                # store line where it started
                configure_linenum = pkg.current_linenum
                configure_cmdline = line.strip()

        res = hardcoded_library_path_regex.search(line)
        if current_section != 'changelog' and res and not \
                (biarch_package_regex.match(pkg.name) or
                 self.hardcoded_lib_path_exceptions_regex.search(
                     res.group(1).lstrip())):
            self.output.add_info('E', pkg, 'hardcoded-library-path', 'in',
                                 res.group(1).lstrip())

        if '%mklibname' in line:
            mklibname = True

        if current_section == 'package':

            # Would be cleaner to get sources and patches from the
            # specfile parsed in Python (see below), but we want to
            # catch %ifarch'd etc ones as well, and also catch these when
            # the specfile is not parseable.
            res = patch_regex.search(line)
            if res:
                pnum = int(res.group(1) or 0)
                patches[pnum] = res.group(2)

            res = obsolete_tags_regex.search(line)
            if res:
                self.output.add_info('W', pkg, 'obsolete-tag',
                                     res.group(1))

            res = buildroot_regex.search(line)
            if res:
                buildroot = True
                if res.group(1).startswith('/'):
                    self.output.add_info(
                        'W', pkg, 'hardcoded-path-in-buildroot-tag',
                        res.group(1))

            res = buildarch_regex.search(line)
            if res:
                if res.group(1) != 'noarch':
                    self.output.add_info(
                        'E', pkg,
                        'buildarch-instead-of-exclusivearch-tag',
                        res.group(1))
                else:
                    package_noarch[current_package] = True

            res = packager_regex.search(line)
            if res:
                self.output.add_info('W', pkg, 'hardcoded-packager-tag',
                                     res.group(1))

            res = prefix_regex.search(line)
            if res:
                if not res.group(1).startswith('%'):
                    self.output.add_info('W', pkg, 'hardcoded-prefix-tag',
                                         res.group(1))

            res = prereq_regex.search(line)
            if res:
                self.output.add_info('E', pkg, 'prereq-use',
                                     res.group(2))

            res = buildprereq_regex.search(line)
            if res:
                self.output.add_info('E', pkg, 'buildprereq-use',
                                     res.group(1))

            if scriptlet_requires_regex.search(line):
                self.output.add_info(
                    'E', pkg, 'broken-syntax-in-scriptlet-requires',
                    line.strip())

            # Unversioned dep tokens containing a comparison operator are
            # almost certainly a missing-space typo.
            res = requires_regex.search(line)
            if res:
                reqs = Pkg.parse_deps(res.group(1))
                for req in unversioned(reqs):
                    if compop_regex.search(req):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            req)

            res = provides_regex.search(line)
            if res:
                provs = Pkg.parse_deps(res.group(1))
                for prov in unversioned(provs):
                    if not prov.startswith('/'):
                        self.output.add_info(
                            'W', pkg, 'unversioned-explicit-provides',
                            prov)
                    if compop_regex.search(prov):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            prov)

            res = obsoletes_regex.search(line)
            if res:
                obses = Pkg.parse_deps(res.group(1))
                for obs in unversioned(obses):
                    if not obs.startswith('/'):
                        self.output.add_info(
                            'W', pkg, 'unversioned-explicit-obsoletes',
                            obs)
                    if compop_regex.search(obs):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            obs)

            res = conflicts_regex.search(line)
            if res:
                confs = Pkg.parse_deps(res.group(1))
                for conf in unversioned(confs):
                    if compop_regex.search(conf):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            conf)

        if current_section == 'changelog':
            # An odd number of leading % means an unescaped macro.
            for match in macro_regex.findall(line):
                res = re.match('%+', match)
                if len(res.group(0)) % 2:
                    self.output.add_info('W', pkg, 'macro-in-%changelog',
                                         match)
        else:
            if not depscript_override:
                depscript_override = \
                    depscript_override_regex.search(line) is not None
            if not depgen_disabled:
                depgen_disabled = \
                    depgen_disable_regex.search(line) is not None
            if not patch_fuzz_override:
                patch_fuzz_override = \
                    patch_fuzz_override_regex.search(line) is not None

        if current_section == 'files':

            # TODO: check scriptlets for these too?
            if package_noarch.get(current_package) or \
                    (current_package not in package_noarch and
                     package_noarch.get(None)):
                res = libdir_regex.search(line)
                if res:
                    pkgname = current_package
                    if pkgname is None:
                        pkgname = '(main package)'
                    self.output.add_info(
                        'W', pkg, 'libdir-macro-in-noarch-package',
                        pkgname, line.rstrip())

        if not indent_tabs and '\t' in line:
            indent_tabs = pkg.current_linenum
        if not indent_spaces and indent_spaces_regex.search(line):
            indent_spaces = pkg.current_linenum

        # Check if egrep or fgrep is used
        if current_section not in \
                ('package', 'changelog', 'description', 'files'):
            greps = deprecated_grep_regex.findall(line)
            if greps:
                self.output.add_info('W', pkg, 'deprecated-grep', greps)

        # If not checking spec file only, we're checking one inside a
        # SRPM -> skip this check to avoid duplicate warnings (#167)
        if spec_only and self.valid_groups and \
                line.lower().startswith('group:'):
            group = line[6:].strip()
            if group not in self.valid_groups:
                self.output.add_info('W', pkg, 'non-standard-group',
                                     group)

        # Test if there are macros in comments
        if hashPos != -1 and \
                (hashPos == 0 or line[hashPos - 1] in (' ', '\t')):
            for match in macro_regex.findall(line[hashPos + 1:]):
                res = re.match('%+', match)
                if len(res.group(0)) % 2:
                    self.output.add_info('W', pkg, 'macro-in-comment',
                                         match)

    # Last line read is not useful after this point
    pkg.current_linenum = None

    for sect in (x for x in buildroot_clean if not buildroot_clean[x]):
        self.output.add_info('W', pkg, 'no-cleaning-of-buildroot',
                             '%' + sect)

    if not buildroot:
        self.output.add_info('W', pkg, 'no-buildroot-tag')

    for sec in ('prep', 'build', 'install', 'clean'):
        if not section.get(sec):
            self.output.add_info('W', pkg, 'no-%%%s-section' % sec)
    for sec in ('changelog', ):
        # prep, build, install, clean, check prevented by rpmbuild 4.4
        if section.get(sec, 0) > 1:
            self.output.add_info('W', pkg,
                                 'more-than-one-%%%s-section' % sec)

    if is_lib_pkg and not mklibname:
        self.output.add_info('E', pkg, 'lib-package-without-%mklibname')

    if depscript_override and not depgen_disabled:
        self.output.add_info('W', pkg,
                             'depscript-without-disabling-depgen')

    if patch_fuzz_override:
        self.output.add_info('W', pkg, 'patch-fuzz-is-changed')

    if indent_spaces and indent_tabs:
        pkg.current_linenum = max(indent_spaces, indent_tabs)
        self.output.add_info(
            'W', pkg, 'mixed-use-of-spaces-and-tabs',
            '(spaces: line %d, tab: line %d)' %
            (indent_spaces, indent_tabs))
        pkg.current_linenum = None

    # process gathered info
    if not patches_auto_applied:
        for pnum, pfile in patches.items():
            if pnum in applied_patches_ifarch:
                self.output.add_info('W', pkg, '%ifarch-applied-patch',
                                     'Patch%d:' % pnum, pfile)
            if pnum not in applied_patches:
                self.output.add_info('W', pkg, 'patch-not-applied',
                                     'Patch%d:' % pnum, pfile)

    # Rest of the checks require a real spec file
    if not self._spec_file:
        return

    # We'd like to parse the specfile only once using python bindings,
    # but it seems errors from rpmlib get logged to stderr and we can't
    # capture and print them nicely, so we do it once each way :P

    out = Pkg.getstatusoutput(
        ('rpm', '-q', '--qf=', '-D', '_sourcedir %s' % pkg.dirName(),
         '--specfile', self._spec_file))
    parse_error = False
    for line in out[1].splitlines():
        # No such file or dir hack: https://bugzilla.redhat.com/487855
        if 'No such file or directory' not in line:
            parse_error = True
            self.output.add_info('E', pkg, 'specfile-error', line)

    if not parse_error:
        # grab sources and patches from parsed spec object to get
        # them with macros expanded for URL checking

        spec_obj = None
        rpm.addMacro('_sourcedir', pkg.dirName())
        try:
            ts = rpm.TransactionSet()
            spec_obj = ts.parseSpec(self._spec_file)
        except (ValueError, rpm.error):
            # errors logged above already
            pass
        rpm.delMacro('_sourcedir')
        if spec_obj:
            try:
                # rpm < 4.8.0
                sources = spec_obj.sources()
            except TypeError:
                # rpm >= 4.8.0
                sources = spec_obj.sources
            for src in sources:
                (url, num, flags) = src
                (scheme, netloc) = urlparse(url)[0:2]
                if flags & 1:  # rpmspec.h, rpm.org ticket #123
                    srctype = 'Source'
                else:
                    srctype = 'Patch'
                tag = '%s%s' % (srctype, num)
                if scheme and netloc:
                    info = self.check_url(pkg, tag, url)
                    if not info or not hasattr(pkg, 'files'):
                        continue
                    # Compare remote size/digest headers (if any) with the
                    # file actually shipped in the package.
                    clen = info.get('Content-Length')
                    if clen is not None:
                        clen = int(clen)
                    cmd5 = info.get('Content-MD5')
                    if cmd5 is not None:
                        cmd5 = cmd5.lower()
                    if clen is not None or cmd5 is not None:
                        # Not using path from urlparse results to match how
                        # rpm itself parses the basename.
                        pkgfile = pkg.files().get(url.split('/')[-1])
                        if pkgfile:
                            if clen is not None and \
                                    pkgfile.size != clen:
                                self.output.add_info(
                                    'W', pkg, 'file-size-mismatch',
                                    '%s = %s, %s = %s' %
                                    (pkgfile.name, pkgfile.size, url,
                                     clen))
                            # pkgfile.md5 could be some other digest than
                            # MD5, treat as MD5 only if it's 32 chars long
                            if cmd5 and len(pkgfile.md5) == 32 \
                                    and pkgfile.md5 != cmd5:
                                self.output.add_info(
                                    'W', pkg, 'file-md5-mismatch',
                                    '%s = %s, %s = %s' %
                                    (pkgfile.name, pkgfile.md5, url,
                                     cmd5))
                elif srctype == 'Source' and tarball_regex.search(url):
                    self.output.add_info('W', pkg, 'invalid-url',
                                         '%s:' % tag, url)
def test_range_compare():
    """A 'foo < 1.0' requirement is not satisfied by 'foo = 1:0.5'
    (the provide's epoch 1 beats the require's implicit epoch)."""
    cases = (
        (('foo', rpm.RPMSENSE_LESS, (None, '1.0', None)),
         ('foo', rpm.RPMSENSE_EQUAL, ('1', '0.5', None))),
    )
    for req, prov in cases:
        assert not Pkg.rangeCompare(req, prov)
def check_binary(self, pkg):
    """Check menu entries shipped by a binary package.

    Validates menu file placement/permissions, the update-menus scriptlet
    calls, and then parses each menu file (preprocessed with cpp) to check
    its package, command, titles, needs/section and icon fields.
    """
    files = pkg.files()
    menus = []

    for fname, pkgfile in files.items():
        # Check menu files
        res = menu_file_regex.search(fname)
        mode = pkgfile.mode
        if res:
            basename = res.group(1)
            if not stat.S_ISREG(mode):
                printError(pkg, 'non-file-in-menu-dir', fname)
            else:
                if basename != pkg.name:
                    printWarning(pkg, 'non-coherent-menu-filename', fname)
                # Must be world-readable and not executable.
                if mode & 0o444 != 0o444:
                    printError(pkg, 'non-readable-menu-file', fname)
                if mode & 0o111:
                    printError(pkg, 'executable-menu-file', fname)
                menus.append(fname)
        else:
            # Check old menus from KDE and GNOME
            res = old_menu_file_regex.search(fname)
            if res:
                if stat.S_ISREG(mode):
                    printError(pkg, 'old-menu-entry', fname)
            else:
                # Check non transparent xpm files
                res = xpm_ext_regex.search(fname)
                if res:
                    if stat.S_ISREG(mode) and not pkg.grep('None",',
                                                           fname):
                        printWarning(pkg, 'non-transparent-xpm', fname)
            if fname.startswith('/usr/lib64/menu'):
                printError(pkg, 'menu-in-wrong-dir', fname)

    if menus:
        # Packages with menus must run update-menus in %post and %postun.
        postin = pkg[rpm.RPMTAG_POSTIN] or \
            pkg.scriptprog(rpm.RPMTAG_POSTINPROG)
        if not postin:
            printError(pkg, 'menu-without-postin')
        elif not update_menus_regex.search(postin):
            printError(pkg, 'postin-without-update-menus')

        postun = pkg[rpm.RPMTAG_POSTUN] or \
            pkg.scriptprog(rpm.RPMTAG_POSTUNPROG)
        if not postun:
            printError(pkg, 'menu-without-postun')
        elif not update_menus_regex.search(postun):
            printError(pkg, 'postun-without-update-menus')

        directory = pkg.dirName()
        for f in menus:
            # remove comments and handle cpp continuation lines
            cmd = Pkg.getstatusoutput(('/lib/cpp', directory + f),
                                      True)[1]

            for line in cmd.splitlines():
                # Menu entries start with '?'; skip everything else.
                if not line.startswith('?'):
                    continue
                res = package_regex.search(line)
                if res:
                    package = res.group(1)
                    if package != pkg.name:
                        printWarning(pkg,
                                     'incoherent-package-value-in-menu',
                                     package, f)
                else:
                    printInfo(pkg, 'unable-to-parse-menu-entry', line)

                # command doubles as a flag for the section check below:
                # True = not yet parsed, False = missing, str = the command.
                command = True
                res = command_regex.search(line)
                if res:
                    command_line = (res.group(1) or res.group(2)).split()
                    command = command_line[0]
                    for launcher in launchers:
                        if not launcher[0].search(command):
                            continue
                        found = False
                        if launcher[1]:
                            # The launcher must either be shipped here or
                            # be required from another package.
                            found = '/bin/' + command_line[0] in files or \
                                '/usr/bin/' + command_line[0] in files or \
                                '/usr/X11R6/bin/' + command_line[0] \
                                in files
                            if not found:
                                for l in launcher[1]:
                                    if l in pkg.req_names():
                                        found = True
                                        break
                            if not found:
                                printError(
                                    pkg,
                                    'use-of-launcher-in-menu-but-no-requires-on',
                                    launcher[1][0])
                        # Check the launched program, not the launcher.
                        command = command_line[1]
                        break

                    if command[0] == '/':
                        if command not in files:
                            printWarning(pkg,
                                         'menu-command-not-in-package',
                                         command)
                    elif not ('/bin/' + command in files or
                              '/usr/bin/' + command in files or
                              '/usr/X11R6/bin/' + command in files):
                        printWarning(pkg, 'menu-command-not-in-package',
                                     command)
                else:
                    printWarning(pkg, 'missing-menu-command')
                    command = False

                res = longtitle_regex.search(line)
                if res:
                    grp = res.groups()
                    title = grp[1] or grp[2]
                    if title[0] != title[0].upper():
                        printWarning(pkg,
                                     'menu-longtitle-not-capitalized',
                                     title)
                    res = version_regex.search(title)
                    if res:
                        printWarning(pkg, 'version-in-menu-longtitle',
                                     title)
                else:
                    printError(pkg, 'no-longtitle-in-menu', f)
                    title = None

                res = title_regex.search(line)
                if res:
                    grp = res.groups()
                    title = grp[1] or grp[2]
                    if title[0] != title[0].upper():
                        printWarning(pkg, 'menu-title-not-capitalized',
                                     title)
                    res = version_regex.search(title)
                    if res:
                        printWarning(pkg, 'version-in-menu-title', title)
                    if '/' in title:
                        printError(pkg, 'invalid-title', title)
                else:
                    printError(pkg, 'no-title-in-menu', f)
                    title = None

                res = needs_regex.search(line)
                if res:
                    grp = res.groups()
                    needs = (grp[1] or grp[2]).lower()
                    if needs in ('x11', 'text', 'wm'):
                        res = section_regex.search(line)
                        if res:
                            grp = res.groups()
                            section = grp[1] or grp[2]
                            # don't warn entries for sections
                            if command and section not in valid_sections:
                                printError(pkg, 'invalid-menu-section',
                                           section, f)
                        else:
                            printInfo(pkg,
                                      'unable-to-parse-menu-section',
                                      line)
                    elif needs not in standard_needs:
                        printInfo(pkg, 'strange-needs', needs, f)
                else:
                    printInfo(pkg, 'unable-to-parse-menu-needs', line)

                res = icon_regex.search(line)
                if res:
                    icon = res.group(1)
                    if not icon_ext_regex.search(icon):
                        printWarning(pkg, 'invalid-menu-icon-type', icon)
                    if icon[0] == '/' and needs == 'x11':
                        printWarning(pkg, 'hardcoded-path-in-menu-icon',
                                     icon)
                    else:
                        for path in icon_paths:
                            if (path[0] + icon) not in files:
                                printError(
                                    pkg, path[1] + '-icon-not-in-package',
                                    icon, f)
                else:
                    printWarning(pkg, 'no-icon-in-menu', title)

                res = xdg_migrated_regex.search(line)
                if res:
                    if not res.group(1).lower() == "true":
                        printError(pkg, 'non-xdg-migrated-menu')
                else:
                    printError(pkg, 'non-xdg-migrated-menu')
def test_b2s():
    """Pkg.b2s decodes bytes to str (recursing into lists) and passes
    other values through unchanged."""
    passthrough = ("foo", ["foo"], None, [])
    for value in passthrough:
        assert Pkg.b2s(value) == value
    assert Pkg.b2s(b"foo") == "foo"
    assert Pkg.b2s([b"foo"]) == ["foo"]
def check_binary(self, pkg):
    """Check SysV init scripts shipped in *pkg*.

    Scans every file under /etc/init.d/ or /etc/rc.d/init.d/ and reports:
    permission/naming problems, missing chkconfig handling in the %post /
    %preun scriptlets, malformed or redundant LSB comment blocks
    (### BEGIN INIT INFO ... ### END INIT INFO), missing status/reload
    entries, chkconfig header issues, and /var/lock/subsys usage that does
    not match the script name.  All findings are emitted through
    self.output.add_info(); nothing is returned.
    """
    initscript_list = []
    for fname, pkgfile in pkg.files().items():
        # Only SysV init scripts are of interest here.
        if not fname.startswith('/etc/init.d/') and \
                not fname.startswith('/etc/rc.d/init.d/'):
            continue

        basename = os.path.basename(fname)
        initscript_list.append(basename)
        # An init script must be at least readable+executable by its owner.
        if pkgfile.mode & 0o500 != 0o500:
            self.output.add_info('E', pkg, 'init-script-non-executable', fname)

        # Dots in the name (e.g. backup suffixes) break runlevel tooling.
        if '.' in basename:
            self.output.add_info('E', pkg, 'init-script-name-with-dot', fname)

        # check chkconfig call in %post and %preun
        postin = pkg[rpm.RPMTAG_POSTIN] or \
            pkg.scriptprog(rpm.RPMTAG_POSTINPROG)
        if not postin:
            self.output.add_info('E', pkg, 'init-script-without-chkconfig-postin', fname)
        elif not chkconfig_regex.search(postin):
            self.output.add_info('E', pkg, 'postin-without-chkconfig', fname)

        preun = pkg[rpm.RPMTAG_PREUN] or \
            pkg.scriptprog(rpm.RPMTAG_PREUNPROG)
        if not preun:
            self.output.add_info('E', pkg, 'init-script-without-chkconfig-preun', fname)
        elif not chkconfig_regex.search(preun):
            self.output.add_info('E', pkg, 'preun-without-chkconfig', fname)

        # Per-script parsing state for the content scan below.
        status_found = False
        reload_found = False
        chkconfig_content_found = False
        subsys_regex_found = False
        in_lsb_tag = False          # inside ### BEGIN/END INIT INFO block
        in_lsb_description = False  # last LSB keyword seen was Description
        lastline = ''
        lsb_tags = {}               # LSB keyword -> list of values seen
        # check common error in file content
        content = None
        try:
            content = [x for x in Pkg.readlines(pkgfile.path)]
        except Exception as e:
            # Unreadable script: warn and move on to the next file.
            self.output.add_info('W', pkg, 'read-error', e)
            continue
        content_str = ''.join(content)
        for line in content:
            line = line[:-1]  # chomp
            # TODO check if there is only one line like this
            if line.startswith('### BEGIN INIT INFO'):
                in_lsb_tag = True
                continue
            if line.endswith('### END INIT INFO'):
                in_lsb_tag = False
                # Each LSB keyword must appear exactly once ...
                for kw, vals in lsb_tags.items():
                    if len(vals) != 1:
                        self.output.add_info('E', pkg, 'redundant-lsb-keyword', kw)
                # ... and the recommended set should all be present.
                for kw in RECOMMENDED_LSB_KEYWORDS:
                    if kw not in lsb_tags:
                        self.output.add_info('W', pkg, 'missing-lsb-keyword',
                                             '%s in %s' % (kw, fname))
            if in_lsb_tag:
                # TODO maybe we do not have to handle this ?
                if lastline.endswith('\\'):
                    # Backslash continuation: glue onto the previous line.
                    line = lastline + line
                else:
                    res = lsb_tags_regex.search(line)
                    if not res:
                        # Not a "# Keyword: value" line; it may still be a
                        # continuation of a multi-line Description.
                        cres = lsb_cont_regex.search(line)
                        if not (in_lsb_description and cres):
                            in_lsb_description = False
                            self.output.add_info('E', pkg, 'malformed-line-in-lsb-comment-block', line)
                        else:
                            lsb_tags['Description'][-1] += \
                                ' ' + cres.group(1)
                    else:
                        tag = res.group(1)
                        # X-* keywords are vendor extensions and allowed.
                        if not tag.startswith('X-') and \
                                tag not in LSB_KEYWORDS:
                            self.output.add_info('E', pkg, 'unknown-lsb-keyword', line)
                        else:
                            in_lsb_description = (tag == 'Description')
                            if tag not in lsb_tags:
                                lsb_tags[tag] = []
                            lsb_tags[tag].append(res.group(2))
                lastline = line

            if not status_found and status_regex.search(line):
                status_found = True

            if not reload_found and reload_regex.search(line):
                reload_found = True

            # chkconfig header line, e.g. "# chkconfig: 345 80 20".
            res = chkconfig_content_regex.search(line)
            if res:
                chkconfig_content_found = True
                if self.use_deflevels:
                    # Policy expects explicit default runlevels.
                    if res.group(1) == '-':
                        self.output.add_info('W', pkg, 'no-default-runlevel', fname)
                elif res.group(1) != '-':
                    # Policy expects services disabled by default.
                    self.output.add_info('W', pkg, 'service-default-enabled', fname)

            # /var/lock/subsys/<name> usage; <name> should match the script.
            res = subsys_regex.search(line)
            if res:
                subsys_regex_found = True
                name = res.group(1)
                if self.use_subsys and name != basename:
                    error = True
                    if name[0] == '$':
                        # Shell variable: try to resolve it from the script
                        # text before deciding it mismatches.
                        value = Pkg.substitute_shell_vars(name, content_str)
                        if value == basename:
                            error = False
                    else:
                        # Strip a trailing "}" (from ${...} expansion).
                        i = name.find('}')
                        if i != -1:
                            name = name[0:i]
                            error = name != basename
                    if error and len(name):
                        # Unresolved variables only warrant a warning.
                        if name[0] == '$':
                            self.output.add_info('W', pkg, 'incoherent-subsys', fname, name)
                        else:
                            self.output.add_info('E', pkg, 'incoherent-subsys', fname, name)

        # A non-empty LSB Default-Start means enabled-by-default as well.
        if 'Default-Start' in lsb_tags:
            if ''.join(lsb_tags['Default-Start']):
                self.output.add_info('W', pkg, 'service-default-enabled', fname)

        if not status_found:
            self.output.add_info('E', pkg, 'no-status-entry', fname)
        if not reload_found:
            self.output.add_info('W', pkg, 'no-reload-entry', fname)
        if not chkconfig_content_found:
            self.output.add_info('E', pkg, 'no-chkconfig-line', fname)
        if not subsys_regex_found and self.use_subsys:
            self.output.add_info('E', pkg, 'subsys-not-used', fname)
        elif subsys_regex_found and not self.use_subsys:
            self.output.add_info('E', pkg, 'subsys-unsupported', fname)

    # With a single init script, its name should match the package name
    # (ignoring a "-sysvinit" suffix), optionally with a trailing "d".
    if len(initscript_list) == 1:
        pkgname = re.sub('-sysvinit$', '', pkg.name.lower())
        goodnames = (pkgname, pkgname + 'd')
        if initscript_list[0] not in goodnames:
            self.output.add_info('W', pkg, 'incoherent-init-script-name',
                                 initscript_list[0], str(goodnames))