def check_regex(regex_text, min_level, output_stream=sys.stdout):
    has_errors = False

    reg = Regex.get_parse_tree(regex_text, 0)
    if ONLY_FUNC:
        errs = []
        getattr(regexlint.checkers, ONLY_FUNC)(reg, errs)
    else:
        errs = run_all_checkers(reg, None)
    # Special case for empty string, since it needs action.
    manual_check_for_empty_string_match(reg, errs, (regex_text, Token))

    errs.sort(key=lambda k: (k[1], k[0]))
    if errs:
        for num, severity, pos1, text in errs:
            if severity < min_level:
                continue

            # Only set this if we're going to output something --
            # otherwise the [Lexer] OK won't print
            has_errors = True

            print('%s%s:%s:%s:%s: %s' % (
                logging.getLevelName(severity)[0], num, 'argv', 'root',
                0, text), file=output_stream)
            mark_str(pos1, pos1 + 1, regex_text, output_stream)
    if not has_errors:
        print(repr(regex_text), 'OK', file=output_stream)
    return output_stream
def check_regex(regex_text, min_level, output_stream=sys.stdout):
    has_errors = False

    reg = Regex.get_parse_tree(regex_text, 0)
    if ONLY_FUNC:
        errs = []
        getattr(regexlint.checkers, ONLY_FUNC)(reg, errs)
    else:
        errs = run_all_checkers(reg, None)
    # Special case for empty string, since it needs action.
    manual_check_for_empty_string_match(reg, errs, (regex_text, Token))

    errs.sort(key=lambda k: (k[1], k[0]))
    if errs:
        for num, severity, pos1, text in errs:
            if severity < min_level:
                continue

            # Only set this if we're going to output something --
            # otherwise the [Lexer] OK won't print
            has_errors = True

            print('%s:%s:%s: %s%s: %s' % (
                'argv', 'root', 0,
                logging.getLevelName(severity)[0], num, text),
                file=output_stream)
            mark_str(pos1, pos1 + 1, regex_text, output_stream)
    if not has_errors:
        print(repr(regex_text), 'OK', file=output_stream)
    return output_stream
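# --- Hedged usage sketch (not part of the original module) ----------------
# One way check_regex above could be driven for ad-hoc patterns supplied on
# the command line.  The argument loop and the logging.WARNING default
# threshold are assumptions, not regexlint's actual CLI handling.
import logging
import sys

def check_regex_args(patterns, min_level=logging.WARNING):
    # Lint each literal pattern and echo any findings to stdout.
    for pattern in patterns:
        check_regex(pattern, min_level)

# e.g. check_regex_args(sys.argv[1:])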
def check_lexer(lexer_name, cls, mod_path, min_level, output_stream=sys.stdout):
    # print lexer_name
    # print cls().tokens
    has_errors = False

    bygroups_callback = func_code(bygroups(1))
    for state, pats in cls().tokens.items():
        if not isinstance(pats, list):
            # This is for Inform7Lexer
            print(lexer_name, 'WEIRD', file=output_stream)
            return output_stream

        for i, pat in enumerate(pats):
            if hasattr(pat, 'state'):  # new 'default'
                continue
            try:
                if isinstance(pat[0], Future):
                    pat = (pat[0].get(),) + pat[1:]
                reg = Regex.get_parse_tree(pat[0], cls.flags)
            except TypeError:
                # Doesn't support _inherit yet.
                continue
            except Exception:
                try:
                    print(pat[0], cls, file=output_stream)
                except:
                    pass
                raise
            # Special problem: display an error if count of args to
            # bygroups(...) doesn't match the number of capture groups
            if callable(pat[1]) and func_code(pat[1]) is bygroups_callback:
                by_groups = func_closure(pat[1])
            else:
                by_groups = None

            if ONLY_FUNC:
                errs = []
                getattr(regexlint.checkers, ONLY_FUNC)(reg, errs)
            else:
                errs = run_all_checkers(reg, by_groups)
            # Special case for empty string, since it needs action.
            manual_check_for_empty_string_match(reg, errs, pat)

            errs.sort(key=lambda k: (k[1], k[0]))
            if errs:
                # print "Errors in", lexer_name, state, "pattern", i
                for num, severity, pos1, text in errs:
                    if severity < min_level:
                        continue

                    # Only set this if we're going to output something --
                    # otherwise the [Lexer] OK won't print
                    has_errors = True

                    foo = find_offending_line(mod_path, lexer_name, state,
                                              i, pos1)
                    if foo:
                        line = 'L' + str(foo[0])
                    else:
                        line = 'pat#' + str(i + 1)
                    print('%s%s:%s:%s:%s: %s' % (
                        logging.getLevelName(severity)[0], num, lexer_name,
                        state, line, text), file=output_stream)
                    if foo:
                        mark(*(foo + (output_stream,)))
                    else:
                        mark_str(pos1, pos1 + 1, pat[0], output_stream)
    if not has_errors:
        print(lexer_name, 'OK', file=output_stream)
    return output_stream
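# --- Hedged usage sketch (not part of the original module) ----------------
# How the check_lexer variant above might be run over every registered
# Pygments lexer.  get_all_lexers() and find_lexer_class() are real Pygments
# helpers; recovering mod_path through sys.modules is an assumption about
# what the surrounding command-line driver does.
import logging
import sys
from pygments.lexers import find_lexer_class, get_all_lexers

def check_all_lexers(min_level=logging.WARNING):
    for name, _aliases, _filenames, _mimetypes in get_all_lexers():
        cls = find_lexer_class(name)
        if cls is None:
            continue
        # Path of the module defining the lexer, for find_offending_line.
        mod_path = sys.modules[cls.__module__].__file__
        check_lexer(cls.__name__, cls, mod_path, min_level)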
def check_lexer(lexer_name, cls, mod_path, min_level, verbose,
                output_stream=sys.stdout):
    # print lexer_name
    # print cls().tokens
    has_errors = False

    bygroups_callback = func_code(bygroups(1))
    for state, pats in cls().tokens.items():
        if not isinstance(pats, list):
            # This is for Inform7Lexer
            if verbose:
                print(lexer_name, 'WEIRD', file=output_stream)
            return (output_stream, False)

        for i, pat in enumerate(pats):
            if hasattr(pat, 'state'):  # new 'default'
                continue
            ignore_w123 = False
            try:
                if isinstance(pat[0], Future):
                    if isinstance(pat[0], words):
                        ignore_w123 = True
                    pat = (pat[0].get(),) + pat[1:]
                reg = Regex.get_parse_tree(pat[0], cls.flags)
            except TypeError:
                # Doesn't support _inherit yet.
                continue
            except Exception:
                try:
                    print(pat[0], cls, file=output_stream)
                except:
                    pass
                raise
            # Special problem: display an error if count of args to
            # bygroups(...) doesn't match the number of capture groups
            if callable(pat[1]) and func_code(pat[1]) is bygroups_callback:
                by_groups = func_closure(pat[1])
            else:
                by_groups = None

            if ONLY_FUNC:
                errs = []
                getattr(regexlint.checkers, ONLY_FUNC)(reg, errs)
            else:
                errs = run_all_checkers(reg, by_groups)
            # Special case for empty string, since it needs action.
            manual_check_for_empty_string_match(reg, errs, pat)

            errs.sort(key=lambda k: (k[1], k[0]))
            if ignore_w123:
                remove_error(errs, '123')

            if errs:
                for num, severity, pos1, text in errs:
                    if severity < min_level:
                        continue

                    # Only set this if we're going to output something --
                    # otherwise the [Lexer] OK won't print
                    has_errors = True

                    foo = find_offending_line(mod_path, lexer_name, state,
                                              i, pos1)
                    line = '%s:' % foo[0] if foo else ''
                    patn = 'pat#' + str(i + 1)
                    print('%s:%s (%s:%s:%s) %s%s: %s' % (
                        mod_path, line, lexer_name, state, patn,
                        logging.getLevelName(severity)[0], num, text),
                        file=output_stream)
                    if foo:
                        mark(*(foo + (output_stream,)))
                    else:
                        mark_str(pos1, pos1 + 1, pat[0], output_stream)
    if verbose and not has_errors:
        print(lexer_name, 'OK', file=output_stream)
    return (output_stream, has_errors)
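# --- Hedged usage sketch (not part of the original module) ----------------
# The verbose variant above returns (output_stream, has_errors); a driver
# could fold that into a process exit status.  The (name, cls, path) triples
# and the exit-code convention here are assumptions for illustration.
def lint_lexers(lexers, min_level, verbose=False):
    any_errors = False
    for name, cls, path in lexers:
        _stream, has_errors = check_lexer(name, cls, path, min_level, verbose)
        any_errors = any_errors or has_errors
    return 1 if any_errors else 0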
def check_lexer(lexer_name, cls, mod_path, min_level, output_stream=sys.stdout):
    # print lexer_name
    # print cls().tokens
    has_errors = False

    bygroups_callback = bygroups(1).func_code
    for state, pats in cls().tokens.iteritems():
        if not isinstance(pats, list):
            # This is for Inform7Lexer
            print >>output_stream, lexer_name, 'WEIRD'
            return output_stream

        for i, pat in enumerate(pats):
            if hasattr(pat, 'state'):  # new 'default'
                continue
            try:
                if isinstance(pat[0], Future):
                    pat = (pat[0].get(),) + pat[1:]
                reg = Regex.get_parse_tree(pat[0], cls.flags)
            except TypeError:
                # Doesn't support _inherit yet.
                continue
            except Exception:
                try:
                    print >>output_stream, pat[0], cls
                except:
                    pass
                raise
            # Special problem: display an error if count of args to
            # bygroups(...) doesn't match the number of capture groups
            if callable(pat[1]) and pat[1].func_code is bygroups_callback:
                by_groups = pat[1].func_closure[0].cell_contents
            else:
                by_groups = None

            if ONLY_FUNC:
                errs = []
                getattr(regexlint.checkers, ONLY_FUNC)(reg, errs)
            else:
                errs = run_all_checkers(reg, by_groups)
            # Special case for empty string, since it needs action.
            manual_check_for_empty_string_match(reg, errs, pat)

            errs.sort(key=lambda k: (k[1], k[0]))
            if errs:
                # print "Errors in", lexer_name, state, "pattern", i
                for num, severity, pos1, text in errs:
                    if severity < min_level:
                        continue

                    # Only set this if we're going to output something --
                    # otherwise the [Lexer] OK won't print
                    has_errors = True

                    foo = find_offending_line(mod_path, lexer_name, state,
                                              i, pos1)
                    if foo:
                        line = 'L' + str(foo[0])
                    else:
                        line = 'pat#' + str(i + 1)
                    print >>output_stream, '%s%s:%s:%s:%s: %s' % (
                        logging.getLevelName(severity)[0], num, lexer_name,
                        state, line, text)
                    if foo:
                        mark(*(foo + (output_stream,)))
                    else:
                        # Subtract one for closing quote
                        start = len(consistent_repr(pat[0][:pos1])) - 1
                        end = len(consistent_repr(pat[0][:pos1 + 1])) - 1
                        if start == end:
                            # This handles the case where pos1 points to the
                            # end of the string.  Regex "|" with pos1 = 1.
                            end += 1
                        assert end > start
                        text, start, end = shorten(repr(pat[0]), start, end)
                        mark(-1, start, end, text, output_stream)
    if not has_errors:
        print >>output_stream, lexer_name, "OK"
    return output_stream