def check_syntax_script(prog, commandline, script):
    """Run the external syntax checker `prog commandline <tmpfile>` on `script`.

    The script bytes are written to a secure temporary file which is always
    removed afterwards.  Returns False immediately for an empty/None script;
    otherwise returns the checker's exit status (falsy means the syntax check
    passed).

    Fix: the original closed the temp file twice (once in the try body and
    again in ``finally``); a ``with`` block now guarantees exactly one close
    before the checker runs, and ``finally`` only handles unlinking.
    """
    if not script:
        return False
    # TODO: test that 'prog' is available/executable
    tmpfd, tmpname = tempfile.mkstemp(prefix='rpmlint.')
    try:
        with os.fdopen(tmpfd, 'wb') as tmpfile:
            tmpfile.write(script)
        # File must be closed (flushed) before the checker reads it.
        ret = Pkg.getstatusoutput((prog, commandline, tmpname))
    finally:
        os.remove(tmpname)
    return ret[0]
def check_file(self, pkg, filename):
    """Validate one .desktop file and report any problems found.

    Runs desktop-file-validate on the file; on a nonzero exit status every
    'error: ' line of its output is reported individually (or one generic
    error if none could be extracted).  Also checks that the file is valid
    UTF-8, then hands it over to parse_desktop_file() for content checks.
    """
    pkg_root = pkg.dirName()
    path = pkg_root + filename
    status = getstatusoutput(('desktop-file-validate', path), True)
    if status[0]:
        reported = False
        for out_line in status[1].splitlines():
            if 'error: ' not in out_line:
                continue
            detail = out_line.split('error: ')[1]
            self.output.add_info('E', pkg, 'invalid-desktopfile', filename,
                                 detail)
            reported = True
        if not reported:
            # Validation failed but no parseable error line was produced.
            self.output.add_info('E', pkg, 'invalid-desktopfile', filename)
    if not is_utf8(path):
        self.output.add_info('E', pkg, 'non-utf8-desktopfile', filename)
    self.parse_desktop_file(pkg, pkg_root, path, filename)
def check_file(self, pkg, filename):
    """Validate an AppData/AppStream metadata file.

    Prefers the configured external checker (adding ``--nonet`` for
    appstream-util when network access is disabled).  If the checker binary
    is not installed (OSError), falls back to a plain XML well-formedness
    check.  Emits 'invalid-appdata-file' on any validation failure.

    Fix: the fallback branch recomputed ``pkg.dirName() + filename`` even
    though the identical path was already available in ``f``; it now reuses
    ``f`` for consistency with the primary branch.
    """
    root = pkg.dirName()
    f = root + filename
    checker = self.appdata_checker
    if checker[0] == 'appstream-util' and not self.network_enabled:
        checker += ('--nonet', )
    validation_failed = False
    try:
        st = getstatusoutput(checker + (f, ))
        # Return code nonzero?
        validation_failed = (st[0] != 0)
    except OSError:
        # checker is not installed, do a validation manually
        try:
            ET.parse(f)
        except ET.ParseError:
            validation_failed = True
    if validation_failed:
        self.output.add_info('E', pkg, 'invalid-appdata-file', filename)
def __init__(self, config, output, pkg, path, fname, is_ar, is_shlib):
    """Gather information about one ELF object (or ar archive) at `path`.

    Parses the output of ``readelf -W -S -l -d -s`` to populate attributes
    such as needed/rpath/soname/stack flags, scans the dynamic symbol calls
    for mktemp/set*id/forbidden functions, and — for installed packages —
    uses ``ldd`` to collect undefined symbols and unused direct dependencies.
    All findings are stored on ``self``; problems running the external tools
    are reported via ``self.output``.
    """
    # Result attributes consumed by the caller after construction.
    self.readelf_error = False
    self.needed = []
    self.rpath = []
    self.undef = []
    self.unused = []
    self.config = config
    self.output = output
    self.comment = False
    self.soname = False
    self.non_pic = True
    self.stack = False
    self.exec_stack = False
    self.exit_calls = []
    self.forbidden_calls = []
    fork_called = False
    self.tail = ''
    self.lto_sections = False
    self.no_text_in_archive = False
    self.setgid = False
    self.setuid = False
    self.setgroups = False
    self.mktemp = False
    self.forbidden_functions = self.config.configuration['WarnOnFunction']
    if self.forbidden_functions:
        for name, func in self.forbidden_functions.items():
            # precompile regexps
            f_name = func['f_name']
            func['f_regex'] = create_nonlibc_regexp_call(f_name)
            if 'good_param' in func and func['good_param']:
                func['waiver_regex'] = re.compile(func['good_param'])
            # register descriptions
            self.output.error_details.update({name: func['description']})
    is_debug = path.endswith('.debug')
    is_archive = path.endswith('.a')
    res = Pkg.getstatusoutput(
        ('readelf', '-W', '-S', '-l', '-d', '-s', path))
    if not res[0]:
        lines = res[1].splitlines()
        # For an archive, test if all .text sections are empty
        if is_archive:
            has_text_segment = False
            non_zero_text_segment = False
            for line in lines:
                r = self.text_section_regex.search(line)
                if r:
                    has_text_segment = True
                    # readelf prints section sizes in hex.
                    size = int(r.group(1), 16)
                    if size > 0:
                        non_zero_text_segment = True
            if has_text_segment and not non_zero_text_segment:
                self.no_text_in_archive = True
        # First pass: section headers / dynamic section, up to the
        # symbol tables.
        for line in lines:
            if self.lto_section_name_prefix in line:
                self.lto_sections = True
            r = self.needed_regex.search(line)
            if r:
                self.needed.append(r.group(1))
                continue
            r = self.rpath_regex.search(line)
            if r:
                # RPATH/RUNPATH entries are colon-separated.
                for p in r.group(1).split(':'):
                    self.rpath.append(p)
                continue
            if self.comment_regex.search(line):
                self.comment = True
                continue
            if self.pic_regex.search(line):
                self.non_pic = False
                continue
            r = self.soname_regex.search(line)
            if r:
                self.soname = r.group(1)
                continue
            r = self.stack_regex.search(line)
            if r:
                self.stack = True
                flags = r.group(1)
                if flags and self.stack_exec_regex.search(flags):
                    self.exec_stack = True
                continue
            if line.startswith('Symbol table'):
                break
        # Second pass: symbol entries — look for interesting calls.
        for line in lines:
            r = self.call_regex.search(line)
            if not r:
                continue
            line = r.group(1)
            if self.mktemp_call_regex.search(line):
                self.mktemp = True
            if self.setgid_call_regex.search(line):
                self.setgid = True
            if self.setuid_call_regex.search(line):
                self.setuid = True
            if self.setgroups_call_regex.search(line):
                self.setgroups = True
            if self.forbidden_functions:
                for r_name, func in self.forbidden_functions.items():
                    ret = func['f_regex'].search(line)
                    if ret:
                        self.forbidden_calls.append(r_name)
            if is_shlib:
                r = self.exit_call_regex.search(line)
                if r:
                    self.exit_calls.append(r.group(1))
                    continue
                r = self.fork_call_regex.search(line)
                if r:
                    fork_called = True
                    continue
        # check if we don't have a string that will automatically
        # waive the presence of a forbidden call
        if self.forbidden_calls:
            res = Pkg.getstatusoutput(('strings', path))
            if not res[0]:
                for line in res[1].splitlines():
                    # as we need to remove elements, iterate backwards
                    for i in range(len(self.forbidden_calls) - 1, -1, -1):
                        func = self.forbidden_calls[i]
                        f = self.forbidden_functions[func]
                        if 'waiver_regex' not in f:
                            continue
                        r = f['waiver_regex'].search(line)
                        if r:
                            del self.forbidden_calls[i]
        if self.non_pic:
            # NOTE(review): `res` may be the strings(1) output here if the
            # forbidden-calls branch ran; otherwise it is the readelf output.
            self.non_pic = 'TEXTREL' in res[1]
        # Ignore all exit() calls if fork() is being called.
        # Does not have any context at all but without this kludge, the
        # number of false positives would probably be intolerable.
        if fork_called:
            self.exit_calls = []
    else:
        self.readelf_error = True
        # Go and others are producing ar archives that don't have ELF
        # headers, so don't complain about it
        if not is_ar:
            self.output.add_info('W', pkg, 'binaryinfo-readelf-failed',
                                 fname, re.sub('\n.*', '', res[1]))
    # Keep the last 12 bytes of the file (used by callers for
    # trailing-garbage checks).
    try:
        with open(path, 'rb') as fobj:
            fobj.seek(-12, os.SEEK_END)
            self.tail = byte_to_string(fobj.read())
    except Exception as e:
        self.output.add_info('W', pkg, 'binaryinfo-tail-failed %s: %s'
                             % (fname, e))
    # Undefined symbol and unused direct dependency checks make sense only
    # for installed packages.
    # skip debuginfo: https://bugzilla.redhat.com/190599
    if not is_ar and not is_debug and isinstance(pkg, Pkg.InstalledPkg):
        # We could do this with objdump, but it's _much_ simpler with ldd.
        res = Pkg.getstatusoutput(('ldd', '-d', '-r', path))
        if not res[0]:
            for line in res[1].splitlines():
                undef = self.undef_regex.search(line)
                if undef:
                    self.undef.append(undef.group(1))
            if self.undef:
                # Demangle C++ symbol names; best effort only.
                try:
                    res = Pkg.getstatusoutput(['c++filt'] + self.undef)
                    if not res[0]:
                        self.undef = res[1].splitlines()
                except OSError:
                    pass
        else:
            self.output.add_info('W', pkg, 'ldd-failed', fname)
        res = Pkg.getstatusoutput(('ldd', '-r', '-u', path))
        if res[0]:
            # Either ldd doesn't grok -u (added in glibc 2.3.4) or we have
            # unused direct dependencies
            in_unused = False
            for line in res[1].splitlines():
                if not line.rstrip():
                    pass
                elif line.startswith('Unused direct dependencies'):
                    in_unused = True
                elif in_unused:
                    unused = self.unused_regex.search(line)
                    if unused:
                        self.unused.append(unused.group(1))
                    else:
                        in_unused = False
def __init__(self, config, output, pkg, path, fname, is_ar, is_shlib):
    """Gather information about one ELF object at `path`.

    Same readelf/ldd based collection as the archive variant, plus a
    chroot-without-chdir heuristic: when both chroot and chdir are called,
    objdump's disassembly is scanned to check that a chroot@plt call sits
    within two call instructions of a chdir@plt call.  On architectures
    without a recognised call pattern the check is waived.
    """
    # Result attributes consumed by the caller after construction.
    self.readelf_error = False
    self.needed = []
    self.rpath = []
    self.undef = []
    self.unused = []
    self.config = config
    self.output = output
    self.comment = False
    self.soname = False
    self.non_pic = True
    self.stack = False
    self.exec_stack = False
    self.exit_calls = []
    self.forbidden_calls = []
    fork_called = False
    self.tail = ''
    self.setgid = False
    self.setuid = False
    self.setgroups = False
    self.chroot = False
    self.chdir = False
    self.chroot_near_chdir = False
    self.mktemp = False
    self.forbidden_functions = self.config.configuration['WarnOnFunction']
    if self.forbidden_functions:
        for name, func in self.forbidden_functions.items():
            # precompile regexps
            f_name = func['f_name']
            func['f_regex'] = create_nonlibc_regexp_call(f_name)
            if func['good_param']:
                func['waiver_regex'] = re.compile(func['good_param'])
            # register descriptions
            self.output.error_details.update({name: func['description']})
    is_debug = path.endswith('.debug')
    # Currently this implementation works only on specific
    # architectures due to reliance on arch specific assembly.
    if (pkg.arch.startswith('armv') or pkg.arch == 'aarch64'):
        # 10450: ebffffec bl 10408 <chroot@plt>
        self.objdump_call_regex = re.compile(br'\sbl\s+(.*)')
    elif (pkg.arch.endswith('86') or pkg.arch == 'x86_64'):
        # 401eb8: e8 c3 f0 ff ff callq 400f80 <chdir@plt>
        self.objdump_call_regex = re.compile(br'callq?\s(.*)')
    else:
        # No known call-instruction pattern for this architecture.
        self.objdump_call_regex = None
    res = Pkg.getstatusoutput(
        ('readelf', '-W', '-S', '-l', '-d', '-s', path))
    if not res[0]:
        lines = res[1].splitlines()
        # First pass: section headers / dynamic section, up to the
        # symbol tables.
        for line in lines:
            r = self.needed_regex.search(line)
            if r:
                self.needed.append(r.group(1))
                continue
            r = self.rpath_regex.search(line)
            if r:
                # RPATH/RUNPATH entries are colon-separated.
                for p in r.group(1).split(':'):
                    self.rpath.append(p)
                continue
            if self.comment_regex.search(line):
                self.comment = True
                continue
            if self.pic_regex.search(line):
                self.non_pic = False
                continue
            r = self.soname_regex.search(line)
            if r:
                self.soname = r.group(1)
                continue
            r = self.stack_regex.search(line)
            if r:
                self.stack = True
                flags = r.group(1)
                if flags and self.stack_exec_regex.search(flags):
                    self.exec_stack = True
                continue
            if line.startswith('Symbol table'):
                break
        # Second pass: symbol entries — look for interesting calls.
        for line in lines:
            r = self.call_regex.search(line)
            if not r:
                continue
            line = r.group(1)
            if self.mktemp_call_regex.search(line):
                self.mktemp = True
            if self.setgid_call_regex.search(line):
                self.setgid = True
            if self.setuid_call_regex.search(line):
                self.setuid = True
            if self.setgroups_call_regex.search(line):
                self.setgroups = True
            if self.chdir_call_regex.search(line):
                self.chdir = True
            if self.chroot_call_regex.search(line):
                self.chroot = True
            if self.forbidden_functions:
                for r_name, func in self.forbidden_functions.items():
                    ret = func['f_regex'].search(line)
                    if ret:
                        self.forbidden_calls.append(r_name)
            if is_shlib:
                r = self.exit_call_regex.search(line)
                if r:
                    self.exit_calls.append(r.group(1))
                    continue
                r = self.fork_call_regex.search(line)
                if r:
                    fork_called = True
                    continue
        # check if we don't have a string that will automatically
        # waive the presence of a forbidden call
        if self.forbidden_calls:
            res = Pkg.getstatusoutput(('strings', path))
            if not res[0]:
                for line in res[1].splitlines():
                    # as we need to remove elements, iterate backwards
                    for i in range(len(self.forbidden_calls) - 1, -1, -1):
                        func = self.forbidden_calls[i]
                        f = self.forbidden_functions[func]
                        if 'waiver_regex' not in f:
                            continue
                        r = f['waiver_regex'].search(line)
                        if r:
                            del self.forbidden_calls[i]
        if self.non_pic:
            # NOTE(review): `res` may be the strings(1) output here if the
            # forbidden-calls branch ran; otherwise it is the readelf output.
            self.non_pic = 'TEXTREL' in res[1]
        # Ignore all exit() calls if fork() is being called.
        # Does not have any context at all but without this kludge, the
        # number of false positives would probably be intolerable.
        if fork_called:
            self.exit_calls = []
        # check if chroot is near chdir (since otherwise, chroot is called
        # without chdir)
        if not self.objdump_call_regex and self.chroot and self.chdir:
            # On some architectures, e.g. PPC, it is to difficult to
            # find the actual invocations of chroot/chdir, if both
            # exist assume chroot is fine
            self.chroot_near_chdir = True
        elif self.chroot and self.chdir:
            # Disassemble and look for a chroot@plt call within two call
            # instructions of a chdir@plt call.
            p = subprocess.Popen(('objdump', '-d', path),
                                 stdout=subprocess.PIPE, bufsize=-1,
                                 env=dict(os.environ, LC_ALL='C'))
            with p.stdout:
                index = 0
                chroot_index = -99
                chdir_index = -99
                for line in p.stdout:
                    res = self.objdump_call_regex.search(line)
                    if not res:
                        continue
                    if b'@plt' not in res.group(1):
                        pass
                    elif b'chroot@plt' in res.group(1):
                        chroot_index = index
                        if abs(chroot_index - chdir_index) <= 2:
                            self.chroot_near_chdir = True
                            break
                    elif b'chdir@plt' in res.group(1):
                        chdir_index = index
                        if abs(chroot_index - chdir_index) <= 2:
                            self.chroot_near_chdir = True
                            break
                    index += 1
            if p.wait() and not self.chroot_near_chdir:
                self.output.add_info('W', pkg, 'binaryinfo-objdump-failed',
                                     fname)
                self.chroot_near_chdir = True  # avoid false positive
            elif chroot_index == -99 and chdir_index == -99:
                # Neither PLT call found in the disassembly.
                self.chroot_near_chdir = True  # avoid false positive
    else:
        self.readelf_error = True
        # Go and others are producing ar archives that don't have ELF
        # headers, so don't complain about it
        if not is_ar:
            self.output.add_info('W', pkg, 'binaryinfo-readelf-failed',
                                 fname, re.sub('\n.*', '', res[1]))
    # Keep the last 12 bytes of the file (used by callers for
    # trailing-garbage checks).
    try:
        with open(path, 'rb') as fobj:
            fobj.seek(-12, os.SEEK_END)
            self.tail = byte_to_string(fobj.read())
    except Exception as e:
        self.output.add_info('W', pkg, 'binaryinfo-tail-failed %s: %s'
                             % (fname, e))
    # Undefined symbol and unused direct dependency checks make sense only
    # for installed packages.
    # skip debuginfo: https://bugzilla.redhat.com/190599
    if not is_ar and not is_debug and isinstance(pkg, Pkg.InstalledPkg):
        # We could do this with objdump, but it's _much_ simpler with ldd.
        res = Pkg.getstatusoutput(('ldd', '-d', '-r', path))
        if not res[0]:
            for line in res[1].splitlines():
                undef = self.undef_regex.search(line)
                if undef:
                    self.undef.append(undef.group(1))
            if self.undef:
                # Demangle C++ symbol names; best effort only.
                try:
                    res = Pkg.getstatusoutput(['c++filt'] + self.undef)
                    if not res[0]:
                        self.undef = res[1].splitlines()
                except OSError:
                    pass
        else:
            self.output.add_info('W', pkg, 'ldd-failed', fname)
        res = Pkg.getstatusoutput(('ldd', '-r', '-u', path))
        if res[0]:
            # Either ldd doesn't grok -u (added in glibc 2.3.4) or we have
            # unused direct dependencies
            in_unused = False
            for line in res[1].splitlines():
                if not line.rstrip():
                    pass
                elif line.startswith('Unused direct dependencies'):
                    in_unused = True
                elif in_unused:
                    unused = self.unused_regex.search(line)
                    if unused:
                        self.unused.append(unused.group(1))
                    else:
                        in_unused = False
def check_binary(self, pkg):
    """Check a package's menu entries (Mandriva-style /usr/lib/menu files).

    Walks the package file list to classify menu files (permissions, names,
    obsolete KDE/GNOME menus, non-transparent XPM icons), then verifies the
    %post/%postun scriptlets call update-menus, and finally parses each menu
    file (preprocessed through cpp) to validate its entries: package name,
    command, titles, needs/section, icon and xdg migration status.
    """
    files = pkg.files()
    menus = []
    for fname, pkgfile in files.items():
        # Check menu files
        res = menu_file_regex.search(fname)
        mode = pkgfile.mode
        if res:
            basename = res.group(1)
            if not stat.S_ISREG(mode):
                self.output.add_info('E', pkg, 'non-file-in-menu-dir',
                                     fname)
            else:
                if basename != pkg.name:
                    self.output.add_info('W', pkg,
                                         'non-coherent-menu-filename',
                                         fname)
                # Menu files must be world-readable and not executable.
                if mode & 0o444 != 0o444:
                    self.output.add_info('E', pkg,
                                         'non-readable-menu-file', fname)
                if mode & 0o111:
                    self.output.add_info('E', pkg, 'executable-menu-file',
                                         fname)
                menus.append(fname)
        else:
            # Check old menus from KDE and GNOME
            res = old_menu_file_regex.search(fname)
            if res:
                if stat.S_ISREG(mode):
                    self.output.add_info('E', pkg, 'old-menu-entry', fname)
            else:
                # Check non transparent xpm files
                res = xpm_ext_regex.search(fname)
                if res:
                    if stat.S_ISREG(mode) and not pkg.grep('None",', fname):
                        self.output.add_info('W', pkg,
                                             'non-transparent-xpm', fname)
        if fname.startswith('/usr/lib64/menu'):
            self.output.add_info('E', pkg, 'menu-in-wrong-dir', fname)
    if menus:
        # Menu entries require update-menus runs in %post and %postun.
        postin = pkg[rpm.RPMTAG_POSTIN] or \
            pkg.scriptprog(rpm.RPMTAG_POSTINPROG)
        if not postin:
            self.output.add_info('E', pkg, 'menu-without-postin')
        elif not update_menus_regex.search(postin):
            self.output.add_info('E', pkg, 'postin-without-update-menus')
        postun = pkg[rpm.RPMTAG_POSTUN] or \
            pkg.scriptprog(rpm.RPMTAG_POSTUNPROG)
        if not postun:
            self.output.add_info('E', pkg, 'menu-without-postun')
        elif not update_menus_regex.search(postun):
            self.output.add_info('E', pkg, 'postun-without-update-menus')
        directory = pkg.dirName()
        for f in menus:
            # remove comments and handle cpp continuation lines
            cmd = Pkg.getstatusoutput(('/lib/cpp', directory + f), True)[1]
            for line in cmd.splitlines():
                # Menu entries produced by cpp start with '?'.
                if not line.startswith('?'):
                    continue
                res = package_regex.search(line)
                if res:
                    package = res.group(1)
                    if package != pkg.name:
                        self.output.add_info(
                            'W', pkg, 'incoherent-package-value-in-menu',
                            package, f)
                else:
                    self.output.add_info('I', pkg,
                                         'unable-to-parse-menu-entry', line)
                # `command` doubles as a flag for the later section check.
                command = True
                res = command_regex.search(line)
                if res:
                    command_line = (res.group(1) or res.group(2)).split()
                    command = command_line[0]
                    # If the command is run through a known launcher, check
                    # the launcher binary or requirement, then validate the
                    # launched command instead.
                    for launcher in self.launchers.values():
                        if not launcher['regexp'].search(command):
                            continue
                        found = False
                        if launcher['binaries']:
                            found = '/bin/' + command_line[0] in files or \
                                '/usr/bin/' + command_line[0] in files or \
                                '/usr/X11R6/bin/' + command_line[0] \
                                in files
                            if not found:
                                for l in launcher['binaries']:
                                    if l in pkg.req_names():
                                        found = True
                                        break
                            if not found:
                                self.output.add_info(
                                    'E', pkg,
                                    'use-of-launcher-in-menu-but-no-requires-on',
                                    launcher['binaries'][0])
                        command = command_line[1]
                        break
                    if command[0] == '/':
                        if command not in files:
                            self.output.add_info(
                                'W', pkg, 'menu-command-not-in-package',
                                command)
                    elif not ('/bin/' + command in files or
                              '/usr/bin/' + command in files or
                              '/usr/X11R6/bin/' + command in files):
                        self.output.add_info(
                            'W', pkg, 'menu-command-not-in-package',
                            command)
                else:
                    self.output.add_info('W', pkg, 'missing-menu-command')
                    command = False
                res = longtitle_regex.search(line)
                if res:
                    grp = res.groups()
                    title = grp[1] or grp[2]
                    if title[0] != title[0].upper():
                        self.output.add_info(
                            'W', pkg, 'menu-longtitle-not-capitalized',
                            title)
                    res = version_regex.search(title)
                    if res:
                        self.output.add_info('W', pkg,
                                             'version-in-menu-longtitle',
                                             title)
                else:
                    self.output.add_info('E', pkg, 'no-longtitle-in-menu',
                                         f)
                    title = None
                res = title_regex.search(line)
                if res:
                    grp = res.groups()
                    title = grp[1] or grp[2]
                    if title[0] != title[0].upper():
                        self.output.add_info('W', pkg,
                                             'menu-title-not-capitalized',
                                             title)
                    res = version_regex.search(title)
                    if res:
                        self.output.add_info('W', pkg,
                                             'version-in-menu-title', title)
                    if '/' in title:
                        self.output.add_info('E', pkg, 'invalid-title',
                                             title)
                else:
                    self.output.add_info('E', pkg, 'no-title-in-menu', f)
                    title = None
                res = needs_regex.search(line)
                if res:
                    grp = res.groups()
                    needs = (grp[1] or grp[2]).lower()
                    if needs in ('x11', 'text', 'wm'):
                        res = section_regex.search(line)
                        if res:
                            grp = res.groups()
                            section = grp[1] or grp[2]
                            # don't warn entries for sections
                            if command and \
                                    section not in self.valid_sections:
                                self.output.add_info(
                                    'E', pkg, 'invalid-menu-section',
                                    section, f)
                        else:
                            self.output.add_info(
                                'I', pkg, 'unable-to-parse-menu-section',
                                line)
                    elif needs not in self.standard_needs:
                        self.output.add_info('I', pkg, 'strange-needs',
                                             needs, f)
                else:
                    self.output.add_info('I', pkg,
                                         'unable-to-parse-menu-needs', line)
                res = icon_regex.search(line)
                if res:
                    icon = res.group(1)
                    if not self.icon_ext_regex.search(icon):
                        self.output.add_info('W', pkg,
                                             'invalid-menu-icon-type', icon)
                    if icon[0] == '/' and needs == 'x11':
                        self.output.add_info(
                            'W', pkg, 'hardcoded-path-in-menu-icon', icon)
                    else:
                        # Icon must ship in every configured icon path.
                        for value in self.icon_paths.values():
                            if (value['path'] + icon) not in files:
                                self.output.add_info(
                                    'E', pkg,
                                    value['type'] + '-icon-not-in-package',
                                    icon, f)
                else:
                    self.output.add_info('W', pkg, 'no-icon-in-menu',
                                         title)
                res = xdg_migrated_regex.search(line)
                if res:
                    if not res.group(1).lower() == 'true':
                        self.output.add_info('E', pkg,
                                             'non-xdg-migrated-menu')
                else:
                    self.output.add_info('E', pkg, 'non-xdg-migrated-menu')
def check_spec(self, pkg):
    """Run all spec-file checks on `pkg`'s spec file.

    First scans the raw spec lines once, tracking the current section,
    %if/%ifarch nesting, declared vs. applied patches, ./configure command
    lines, buildroot handling and indentation style, and reporting issues
    as they are found.  Afterwards the spec is parsed with rpm's Python
    bindings (when parseable) so Source/Patch URLs can be checked with
    macros expanded.
    """
    self._spec_file = pkg.name
    # A FakePkg means we are checking a bare spec file, not one inside
    # a SRPM.
    spec_only = isinstance(pkg, Pkg.FakePkg)
    spec_lines = Pkg.readlines(self._spec_file)
    patches = {}
    applied_patches = []
    applied_patches_ifarch = []
    patches_auto_applied = False
    source_dir = False
    buildroot = False
    configure_linenum = None
    configure_cmdline = ''
    mklibname = False
    is_lib_pkg = False
    if_depth = 0
    ifarch_depth = -1
    current_section = 'package'
    buildroot_clean = {'clean': False, 'install': False}
    depscript_override = False
    depgen_disabled = False
    patch_fuzz_override = False
    indent_spaces = 0
    indent_tabs = 0
    # Count of occurrences of each section.
    section = {}
    # None == main package
    current_package = None
    package_noarch = {}
    if self._spec_file:
        if not Pkg.is_utf8(self._spec_file):
            self.output.add_info('E', pkg, 'non-utf8-spec-file',
                                 self._spec_name or self._spec_file)
    # gather info from spec lines
    pkg.current_linenum = 0
    nbsp = UNICODE_NBSP
    for line in spec_lines:
        pkg.current_linenum += 1
        char = line.find(nbsp)
        if char != -1:
            self.output.add_info('W', pkg, 'non-break-space',
                                 'line %s, char %d' %
                                 (pkg.current_linenum, char))
        # Detect section changes (%package, %files, %prep, ...).
        section_marker = False
        for sec, regex in section_regexs.items():
            res = regex.search(line)
            if res:
                current_section = sec
                section_marker = True
                section[sec] = section.get(sec, 0) + 1
                if sec in ('package', 'files'):
                    rest = filelist_regex.sub('', line[res.end() - 1:])
                    res = pkgname_regex.search(rest)
                    if res:
                        current_package = res.group(1)
                    else:
                        current_package = None
                break
        if section_marker:
            if not is_lib_pkg and lib_package_regex.search(line):
                is_lib_pkg = True
            continue
        if (current_section in Pkg.RPM_SCRIPTLETS + ('prep', 'build') and
                contains_buildroot(line)):
            self.output.add_info('W', pkg, 'rpm-buildroot-usage',
                                 '%' + current_section,
                                 line[:-1].strip())
        if make_check_regex.search(line) and current_section not in \
                ('check', 'changelog', 'package', 'description'):
            self.output.add_info('W', pkg,
                                 'make-check-outside-check-section',
                                 line[:-1])
        if current_section in buildroot_clean and \
                not buildroot_clean[current_section] and \
                contains_buildroot(line) and rm_regex.search(line):
            buildroot_clean[current_section] = True
        # Track %if/%ifarch nesting so ifarch-applied patches can be
        # recognised.
        if ifarch_regex.search(line):
            if_depth = if_depth + 1
            ifarch_depth = if_depth
        if if_regex.search(line):
            if_depth = if_depth + 1
        if setup_regex.match(line):
            if not setup_q_regex.search(line):
                # Don't warn if there's a -T without -a or -b
                if setup_t_regex.search(line):
                    if setup_ab_regex.search(line):
                        self.output.add_info('W', pkg, 'setup-not-quiet')
                else:
                    self.output.add_info('W', pkg, 'setup-not-quiet')
            if current_section != 'prep':
                self.output.add_info('W', pkg, 'setup-not-in-prep')
        elif autopatch_regex.search(line):
            patches_auto_applied = True
            if current_section != 'prep':
                self.output.add_info('W', pkg, '%autopatch-not-in-prep')
        else:
            res = autosetup_regex.search(line)
            if res:
                if not autosetup_n_regex.search(res.group(1)):
                    patches_auto_applied = True
                if current_section != 'prep':
                    self.output.add_info('W', pkg,
                                         '%autosetup-not-in-prep')
        if endif_regex.search(line):
            if ifarch_depth == if_depth:
                ifarch_depth = -1
            if_depth = if_depth - 1
        # Record which patch numbers are applied (%patchN, piped, or
        # patch -i forms).
        res = applied_patch_regex.search(line)
        if res:
            pnum = res.group(1) or 0
            for tmp in applied_patch_p_regex.findall(line) or [pnum]:
                pnum = int(tmp)
                applied_patches.append(pnum)
                if ifarch_depth > 0:
                    applied_patches_ifarch.append(pnum)
        else:
            res = applied_patch_pipe_regex.search(line)
            if res:
                pnum = int(res.group(1))
                applied_patches.append(pnum)
                if ifarch_depth > 0:
                    applied_patches_ifarch.append(pnum)
            else:
                res = applied_patch_i_regex.search(line)
                if res:
                    pnum = int(res.group(1))
                    applied_patches.append(pnum)
                    if ifarch_depth > 0:
                        applied_patches_ifarch.append(pnum)
        if not res and not source_dir:
            res = source_dir_regex.search(line)
            if res:
                source_dir = True
                self.output.add_info('E', pkg, 'use-of-RPM_SOURCE_DIR')
        # Accumulate a multi-line ./configure command, then inspect it
        # once the continuation ends.
        if configure_linenum:
            if configure_cmdline[-1] == '\\':
                configure_cmdline = \
                    configure_cmdline[:-1] + line.strip()
            else:
                res = configure_libdir_spec_regex.search(
                    configure_cmdline)
                if not res:
                    # Hack to get the correct (start of ./configure) line
                    # number displayed:
                    real_linenum = pkg.current_linenum
                    pkg.current_linenum = configure_linenum
                    self.output.add_info(
                        'W', pkg, 'configure-without-libdir-spec')
                    pkg.current_linenum = real_linenum
                elif res.group(1):
                    res = re.match(hardcoded_library_paths, res.group(1))
                    if res:
                        self.output.add_info(
                            'E', pkg, 'hardcoded-library-path',
                            res.group(1), 'in configure options')
                configure_linenum = None
        hashPos = line.find('#')
        if current_section != 'changelog':
            cfgPos = line.find('./configure')
            if cfgPos != -1 and (hashPos == -1 or hashPos > cfgPos):
                # store line where it started
                configure_linenum = pkg.current_linenum
                configure_cmdline = line.strip()
        res = hardcoded_library_path_regex.search(line)
        if current_section != 'changelog' and res and not \
                (biarch_package_regex.match(pkg.name) or
                 self.hardcoded_lib_path_exceptions_regex.search(
                     res.group(1).lstrip())):
            self.output.add_info('E', pkg, 'hardcoded-library-path', 'in',
                                 res.group(1).lstrip())
        if '%mklibname' in line:
            mklibname = True
        if current_section == 'package':
            # Would be cleaner to get sources and patches from the
            # specfile parsed in Python (see below), but we want to
            # catch %ifarch'd etc ones as well, and also catch these when
            # the specfile is not parseable.
            res = patch_regex.search(line)
            if res:
                pnum = int(res.group(1) or 0)
                patches[pnum] = res.group(2)
            res = obsolete_tags_regex.search(line)
            if res:
                self.output.add_info('W', pkg, 'obsolete-tag',
                                     res.group(1))
            res = buildroot_regex.search(line)
            if res:
                buildroot = True
                if res.group(1).startswith('/'):
                    self.output.add_info(
                        'W', pkg, 'hardcoded-path-in-buildroot-tag',
                        res.group(1))
            res = buildarch_regex.search(line)
            if res:
                if res.group(1) != 'noarch':
                    self.output.add_info(
                        'E', pkg,
                        'buildarch-instead-of-exclusivearch-tag',
                        res.group(1))
                else:
                    package_noarch[current_package] = True
            res = packager_regex.search(line)
            if res:
                self.output.add_info('W', pkg, 'hardcoded-packager-tag',
                                     res.group(1))
            res = prefix_regex.search(line)
            if res:
                if not res.group(1).startswith('%'):
                    self.output.add_info('W', pkg, 'hardcoded-prefix-tag',
                                         res.group(1))
            res = prereq_regex.search(line)
            if res:
                self.output.add_info('E', pkg, 'prereq-use',
                                     res.group(2))
            res = buildprereq_regex.search(line)
            if res:
                self.output.add_info('E', pkg, 'buildprereq-use',
                                     res.group(1))
            if scriptlet_requires_regex.search(line):
                self.output.add_info('E', pkg,
                                     'broken-syntax-in-scriptlet-requires',
                                     line.strip())
            # Dependency tags: check for control chars, comparison
            # operators in bare tokens, and unversioned explicit
            # Provides/Obsoletes.
            res = requires_regex.search(line)
            if res:
                reqs = Pkg.parse_deps(res.group(1))
                e = Pkg.has_forbidden_controlchars(reqs)
                if e:
                    self.output.add_info(
                        'E', pkg, 'forbidden-controlchar-found',
                        'Requires: %s' % e)
                for req in unversioned(reqs):
                    if compop_regex.search(req):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            req)
            res = provides_regex.search(line)
            if res:
                provs = Pkg.parse_deps(res.group(1))
                e = Pkg.has_forbidden_controlchars(provs)
                if e:
                    self.output.add_info(
                        'E', pkg, 'forbidden-controlchar-found',
                        'Provides: %s' % e)
                for prov in unversioned(provs):
                    if not prov.startswith('/'):
                        self.output.add_info(
                            'W', pkg, 'unversioned-explicit-provides',
                            prov)
                    if compop_regex.search(prov):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            prov)
            res = obsoletes_regex.search(line)
            if res:
                obses = Pkg.parse_deps(res.group(1))
                e = Pkg.has_forbidden_controlchars(obses)
                if e:
                    self.output.add_info(
                        'E', pkg, 'forbidden-controlchar-found',
                        'Obsoletes: %s' % e)
                for obs in unversioned(obses):
                    if not obs.startswith('/'):
                        self.output.add_info(
                            'W', pkg, 'unversioned-explicit-obsoletes',
                            obs)
                    if compop_regex.search(obs):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            obs)
            res = conflicts_regex.search(line)
            if res:
                confs = Pkg.parse_deps(res.group(1))
                e = Pkg.has_forbidden_controlchars(confs)
                if e:
                    self.output.add_info(
                        'E', pkg, 'forbidden-controlchar-found',
                        'Conflicts: %s' % e)
                for conf in unversioned(confs):
                    if compop_regex.search(conf):
                        self.output.add_info(
                            'W', pkg, 'comparison-operator-in-deptoken',
                            conf)
        if current_section == 'changelog':
            e = Pkg.has_forbidden_controlchars(line)
            if e:
                self.output.add_info('E', pkg,
                                     'forbidden-controlchar-found',
                                     '%%changelog: %s' % e)
            for match in self.macro_regex.findall(line):
                res = re.match('%+', match)
                # An odd number of '%'s means the macro is not escaped.
                if len(res.group(0)) % 2:
                    self.output.add_info('W', pkg, 'macro-in-%changelog',
                                         match)
        else:
            if not depscript_override:
                depscript_override = \
                    depscript_override_regex.search(line) is not None
            if not depgen_disabled:
                depgen_disabled = \
                    depgen_disable_regex.search(line) is not None
            if not patch_fuzz_override:
                patch_fuzz_override = \
                    patch_fuzz_override_regex.search(line) is not None
        if current_section == 'files':
            # TODO: check scriptlets for these too?
            if package_noarch.get(current_package) or \
                    (current_package not in package_noarch and
                     package_noarch.get(None)):
                res = libdir_regex.search(line)
                if res:
                    pkgname = current_package
                    if pkgname is None:
                        pkgname = '(main package)'
                    self.output.add_info(
                        'W', pkg, 'libdir-macro-in-noarch-package',
                        pkgname, line.rstrip())
        if not indent_tabs and '\t' in line:
            indent_tabs = pkg.current_linenum
        if not indent_spaces and indent_spaces_regex.search(line):
            indent_spaces = pkg.current_linenum
        # Check if egrep or fgrep is used
        if current_section not in \
                ('package', 'changelog', 'description', 'files'):
            greps = deprecated_grep_regex.findall(line)
            if greps:
                self.output.add_info('W', pkg, 'deprecated-grep', greps)
        # If not checking spec file only, we're checking one inside a
        # SRPM -> skip this check to avoid duplicate warnings (#167)
        if spec_only and self.valid_groups and \
                line.lower().startswith('group:'):
            group = line[6:].strip()
            if group not in self.valid_groups:
                self.output.add_info('W', pkg, 'non-standard-group',
                                     group)
        # Test if there are macros in comments
        if hashPos != -1 and \
                (hashPos == 0 or line[hashPos - 1] in (' ', '\t')):
            for match in self.macro_regex.findall(
                    line[hashPos + 1:]):
                res = re.match('%+', match)
                if len(res.group(0)) % 2:
                    self.output.add_info('W', pkg, 'macro-in-comment',
                                         match)
    # Last line read is not useful after this point
    pkg.current_linenum = None
    for sect in (x for x in buildroot_clean if not buildroot_clean[x]):
        self.output.add_info('W', pkg, 'no-cleaning-of-buildroot',
                             '%' + sect)
    if not buildroot:
        self.output.add_info('W', pkg, 'no-buildroot-tag')
    for sec in ('prep', 'build', 'install', 'clean'):
        if not section.get(sec):
            self.output.add_info('W', pkg, 'no-%%%s-section' % sec)
    for sec in ('changelog',):
        # prep, build, install, clean, check prevented by rpmbuild 4.4
        if section.get(sec, 0) > 1:
            self.output.add_info('W', pkg,
                                 'more-than-one-%%%s-section' % sec)
    if is_lib_pkg and not mklibname:
        self.output.add_info('E', pkg, 'lib-package-without-%mklibname')
    if depscript_override and not depgen_disabled:
        self.output.add_info('W', pkg,
                             'depscript-without-disabling-depgen')
    if patch_fuzz_override:
        self.output.add_info('W', pkg, 'patch-fuzz-is-changed')
    if indent_spaces and indent_tabs:
        pkg.current_linenum = max(indent_spaces, indent_tabs)
        self.output.add_info('W', pkg, 'mixed-use-of-spaces-and-tabs',
                             '(spaces: line %d, tab: line %d)' %
                             (indent_spaces, indent_tabs))
        pkg.current_linenum = None
    # process gathered info
    if not patches_auto_applied:
        for pnum, pfile in patches.items():
            if pnum in applied_patches_ifarch:
                self.output.add_info('W', pkg, '%ifarch-applied-patch',
                                     'Patch%d:' % pnum, pfile)
            if pnum not in applied_patches:
                self.output.add_info('W', pkg, 'patch-not-applied',
                                     'Patch%d:' % pnum, pfile)
    # Rest of the checks require a real spec file
    if not self._spec_file:
        return
    # We'd like to parse the specfile only once using python bindings,
    # but it seems errors from rpmlib get logged to stderr and we can't
    # capture and print them nicely, so we do it once each way :P
    out = Pkg.getstatusoutput(
        ('rpm', '-q', '--qf=', '-D',
         '_sourcedir %s' % Path(self._spec_file).parent,
         '--specfile', self._spec_file))
    parse_error = False
    for line in out[1].splitlines():
        # No such file or dir hack: https://bugzilla.redhat.com/487855
        if 'No such file or directory' not in line:
            parse_error = True
            self.output.add_info('E', pkg, 'specfile-error', line)
    if not parse_error:
        # grab sources and patches from parsed spec object to get
        # them with macros expanded for URL checking
        spec_obj = None
        rpm.addMacro('_sourcedir', pkg.dirName())
        try:
            ts = rpm.TransactionSet()
            spec_obj = ts.parseSpec(str(self._spec_file))
        except (ValueError, rpm.error):
            # errors logged above already
            pass
        rpm.delMacro('_sourcedir')
        if spec_obj:
            try:
                # rpm < 4.8.0
                sources = spec_obj.sources()
            except TypeError:
                # rpm >= 4.8.0
                sources = spec_obj.sources
            for src in sources:
                (url, num, flags) = src
                (scheme, netloc) = urlparse(url)[0:2]
                if flags & 1:
                    # rpmspec.h, rpm.org ticket #123
                    srctype = 'Source'
                else:
                    srctype = 'Patch'
                tag = '%s%s' % (srctype, num)
                if scheme and netloc:
                    # Proper remote URL — nothing to complain about.
                    continue
                elif srctype == 'Source' and tarball_regex.search(url):
                    self.output.add_info('W', pkg, 'invalid-url',
                                         '%s:' % tag, url)