def run(self, context):
    """
    Includes must be at the start of the file
    You cannot include anything that isn't an header file

    Returns a (stop, jump) pair: (True, i) when an error stops further
    checks on this token, (False, 0) otherwise.
    """
    i = 0
    filetype = ""
    # Only applies to preprocessor #include tokens
    if context.check_token(i, "INCLUDE") is False:
        return False, 0
    # Includes are only legal in the global scope, before any other construct
    if type(context.scope
            ) is not GlobalScope or context.scope.include_allowed is False:
        context.new_error("INCLUDE_START_FILE", context.peek_token(i))
        return True, i
    # Re-lex everything after the "include" keyword to inspect the target
    val = context.peek_token(i).value.split("include", 1)[1]
    content = Lexer(val, context.peek_token(i).pos[0])
    tkns = content.get_tokens()
    i = 1
    while i < len(tkns) and tkns[i].type in ["TAB", "SPACE"]:
        i += 1
    if i < len(tkns) and tkns[i].type == "LESS_THAN":
        # <system/header.h> form: scan backwards for the last DOT and take
        # the token that follows it as the extension
        i = len(tkns) - 1
        while i > 0:
            if i < len(tkns) - 1 and tkns[i].type == "DOT":
                i += 1
                filetype = tkns[i].value
                break
            i -= 1
    elif i < len(tkns) and tkns[i].type == "STRING":
        # "local.h" form: the STRING token value apparently keeps its closing
        # quote, so only the first character after the last dot is the
        # extension letter — TODO confirm against the Lexer's STRING tokens
        try:
            filetype = tkns[i].value.split(".")[-1][0]
        except IndexError:
            # Nothing after the last dot (value ends in "."): no extension.
            # Was a bare `except:`, which also swallowed KeyboardInterrupt.
            filetype = ""
    if filetype and filetype != "h":
        context.new_error("INCLUDE_HEADER_ONLY", context.peek_token(0))
    return False, 0
示例#2
0
 def test_file(self, filename):
     """Run the full rule registry on *filename* with stdout captured, then
     compare the captured output against the matching ``.out`` reference
     file next to it."""
     stdout = sys.stdout
     sys.stdout = buff = StringIO()
     try:
         lexer = Lexer(read_file(filename))
         context = Context(filename.split("/")[-1], lexer.get_tokens(), debug=2)
         registry.run(context, read_file(filename))
         reference_output = read_file(filename.split(".")[0] + ".out")
     finally:
         # Restore stdout even when lexing/parsing raises; previously a
         # failure here left sys.stdout pointing at the buffer for the
         # rest of the test run
         sys.stdout = stdout
     self.assertEqual(buff.getvalue(), reference_output)
    def test_files(self):
        """Lex every sample under tests/lexer/files and compare each token
        dump against its ``.tokens`` reference file, printing a summary and
        exiting non-zero if any case failed."""
        for path in sorted(glob.glob("tests/lexer/files/*.c")):
            self.__tests += 1
            print(path.split("/")[-1], end=": ")

            try:
                tokens = Lexer(read_file(path)).check_tokens()
            except TokenError as err:
                self.__failed += 1
                print("Error")
                print(err)
                self.result.append("✗ ")
                continue
            expected = read_file(path.split(".")[0] + ".tokens")
            self.assertEqual(tokens, expected)

        print("----------------------------------")
        print(f"Total {self.__tests}")
        print("".join(self.result))
        print(f"Success {self.__success}, Failed {self.__failed}: ", end="")
        print("✅ OK!" if self.__failed == 0 else "❌ Error!")

        sys.exit(0 if self.__failed == 0 else 1)
示例#4
0
 def run(self, context):
     """
     Header protection must be as follows:
     ```
     #ifndef __FILENAME_H__
     # define __FILENAME_H__
     #endif
     ```
     Any header instruction must be within the header protection

     Drives context.scope.header_protection as a small state machine:
     -1 = no guard seen yet, 0 = matching #ifndef seen, 1 = guard #define
     seen (set by the define-checking rule), 2 = guard closed by #endif.
     """
     i = 0
     # Guards are only meaningful at file (global) scope, in .h files
     if type(context.scope) is not GlobalScope:
         return False, 0
     if context.check_token(
             i, ["IFNDEF", "ENDIF"]) is False or context.filetype != "h":
         return False, 0
     # Expected guard macro name, derived from the file name (dots -> "_")
     protection = context.filename.upper().split("/")[-1].replace(".", "_")
     # Re-lex the macro name that follows the directive keyword
     val = context.peek_token(i).value.split(" ")[-1]
     content = Lexer(val, context.peek_token(i).pos[0])
     tkns = content.get_tokens()
     if context.check_token(i, "IFNDEF") is True:
         if (len(tkns) >= 1 and tkns[0].value == protection
                 and context.scope.header_protection == -1
                 and context.preproc_scope_indent == 1):
             # The #ifndef must be the first meaningful line of the file:
             # anything in the history other than empty lines or comments
             # means code appeared before the guard
             if len(context.history) > 1:
                 for i in range(len(context.history) - 2, 0, -1):
                     if context.history[
                             i] != "IsEmptyLine" and context.history[
                                 i] != "IsComment":
                         context.new_error("HEADER_PROT_ALL",
                                           context.peek_token(0))
                         break
             context.scope.header_protection = 0
         elif len(tkns) < 1 or (tkns[0].value != protection
                                and context.scope.header_protection == -1):
             # First #ifndef of the file does not use the expected macro name
             context.new_error("HEADER_PROT_NAME", context.peek_token(0))
     elif context.check_token(i, "ENDIF") is True:
         # An #endif back at outermost preprocessor depth closes the guard
         if context.scope.header_protection == 1 and context.preproc_scope_indent == 0:
             context.scope.header_protection = 2
     return False, 0
示例#5
0
    def main(self):
        """Run every entry of test_dict through the lexer and assert that it
        raises the expected unrecognized-token error at the recorded
        line/column, then print a summary and exit accordingly."""
        print("\n\nTesting error cases:\n")
        for index, (source, position) in enumerate(test_dict.items(), start=1):
            self.__tests += 1
            expected = f"Error: Unrecognized token line {position[0]}, col {position[1]}"
            check = Lexer(source).check_tokens
            self.assertRaises(check, expected, f"Test {index}: " + repr(str(source)))

        print("----------------------------------")
        print(f"Total {self.__tests}")
        print("".join(self.result))
        print(f"Success {self.__success}, Failed {self.__failed}: ", end="")
        print("✅ OK!" if self.__failed == 0 else "❌ Error!")

        sys.exit(0 if self.__failed == 0 else 1)
示例#6
0
def main():
    """CLI entry point: parse arguments, collect the target .c/.h files,
    run the norm rule registry on each, and exit 1 if any file produced
    an error (0 otherwise)."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "file",
        help="File(s) or folder(s) you wanna run the parser on. If no file provided, runs on current folder.",
        default=[],
        action="append",
        nargs="*",
    )
    parser.add_argument(
        "-d",
        "--debug",
        action="count",
        help="Debug output (multiple values available)",
        default=0,
    )
    parser.add_argument(
        "-o",
        "--only-filename",
        action="store_true",
        help="By default norminette displays the full path to the file, this allows to show only filename",
        default=False,
    )
    parser.add_argument("-v", "--version", action="version", version="norminette " + str(__version__))
    parser.add_argument(
        "--cfile",
        action="store",
        help="Store C file content directly instead of filename",
    )
    parser.add_argument(
        "--hfile",
        action="store",
        help="Store header file content directly instead of filename",
    )
    parser.add_argument("-R", nargs=1, help="compatibility for norminette 2")
    args = parser.parse_args()
    registry = Registry()
    targets = []
    has_err = None
    content = None

    debug = args.debug
    if args.cfile is not None or args.hfile is not None:
        # Source text passed directly on the command line under a synthetic name
        targets = ["file.c"] if args.cfile else ["file.h"]
        content = args.cfile if args.cfile else args.hfile
    else:
        # "file" is an append+nargs="*" positional, so the real list is nested
        args.file = args.file[0]
        if args.file == [[]] or args.file == []:
            # No path given: lint every .c/.h under the current directory
            targets = glob.glob("**/*.[ch]", recursive=True)
        else:
            for arg in args.file:
                if not os.path.exists(arg):
                    print(f"'{arg}' no such file or directory")
                elif os.path.isdir(arg):
                    if arg[-1] != "/":
                        arg = arg + "/"
                    targets.extend(glob.glob(arg + "**/*.[ch]", recursive=True))
                elif os.path.isfile(arg):
                    targets.append(arg)
    # NOTE(review): these Event objects look like leftovers from a threaded
    # runner — they are created and set but never waited on; kept to preserve
    # behavior, confirm before removing
    event = []
    for target in targets:
        if target[-2:] not in [".c", ".h"]:
            print(f"Error: {target} is not valid C or C header file")
            continue
        try:
            event.append(Event())
            if content is None:
                with open(target) as f:
                    source = f.read()
            else:
                source = content
            lexer = Lexer(source)
            tokens = lexer.get_tokens()
            if args.only_filename:
                target = target.split("/")[-1]
            context = Context(target, tokens, debug, args.R)
            registry.run(context, source)
            event[-1].set()
            if context.errors:
                has_err = True
        except (TokenError, CParsingError) as e:
            # Both error types are reported identically; merge the handlers
            has_err = True
            print(target + f": Error!\n\t{colors(e.msg, 'red')}")
            event[-1].set()
        except KeyboardInterrupt:
            event[-1].set()
            sys.exit(1)
    sys.exit(1 if has_err else 0)
示例#7
0
    def run(self, context):
        """
        Preprocessor statements must be defined only in the global scope
        Defined names must be in capital letters
        Define cannot contain newlines
        Define can only contain constant values, such as integers and strings
        """
        i = context.skip_ws(0)
        # A define directly following a function declaration also gets the
        # declaration-specific checks
        if len(context.history
               ) > 1 and context.history[-2] == "IsFuncDeclaration":
            self.check_function_declaration(context)
        if type(context.scope) is not GlobalScope:
            # Tolerated inside a single-line function body; any other
            # non-global placement is an error
            if type(context.scope
                    ) == Function and context.scope.multiline == False:
                pass
            else:
                context.new_error("PREPROC_GLOBAL", context.peek_token(0))
        if context.check_token(i, "DEFINE") is False:
            return False, 0
        # Re-lex everything after the "define" keyword so the macro name and
        # its value can be inspected token by token
        val = context.peek_token(i).value.split("define", 1)[1]
        content = Lexer(val, context.peek_token(i).pos[0])
        tkns = content.get_tokens()
        i = 0  # index of the current token within the re-lexed directive
        identifiers = []  # [0]: macro-name token, [1]: its value token
        # Expected header-guard macro name, derived from the file name
        protection = context.filename.upper().split("/")[-1].replace(".", "_")
        for tkn in tkns:
            if tkn.type == "ESCAPED_NEWLINE":
                context.new_error("NEWLINE_DEFINE", tkn)
            elif tkn.type in ["TAB", "SPACE"]:
                i += 1
                continue
            elif tkn.type == "IDENTIFIER" and len(identifiers) == 0:
                # First identifier seen is the macro name itself
                if tkn.value.isupper() is False:
                    context.new_error("MACRO_NAME_CAPITAL", tkn)
                identifiers.append(tkn)
                # Look ahead: does the directive hold anything beyond
                # whitespace and identifiers (i.e. does the macro have a
                # non-identifier value)?
                tmp = i
                while tmp < len(tkns) - 1 and tkns[tmp].type in [
                        "SPACE",
                        "TAB",
                        "IDENTIFIER",
                ]:
                    tmp += 1
                if tmp == (len(tkns) - 1) and context.filetype == "h":
                    # Bare "#define NAME" in a header while state 0 (the
                    # matching #ifndef was just seen): this should be the
                    # header-protection define
                    if context.scope.header_protection == 0:
                        if identifiers[0].value == protection:
                            context.scope.header_protection = 1
                        elif identifiers[0].value != protection:
                            context.new_error("HEADER_PROT_NAME", tkns[1])
                elif (context.filetype == "c"
                      and context.scope.include_allowed == True
                      and (len(tkns) > tmp + 1 or
                           (len(tkns) == tmp + 1
                            and identifiers[0].value != protection
                            and context.scope.header_protection == -1))):
                    # A real (valued) define in a .c file ends the region in
                    # which #include directives are still allowed
                    context.scope.include_allowed = False

            elif tkn.type in ["IDENTIFIER", "STRING", "CONSTANT"]:
                if len(identifiers) == 1:
                    # Second significant token is the macro value; an
                    # identifier used as value must be fully uppercase too
                    if tkn.type == "IDENTIFIER" and tkn.value.isupper(
                    ) is False:
                        context.new_error("PREPROC_CONSTANT", tkn)
                    identifiers.append(tkn)
                elif len(identifiers) == 0:
                    # Value appeared before any macro name
                    context.new_error("INCORRECT_DEFINE", tkn)
                else:
                    # More than one value token after the macro name
                    context.new_error("TOO_MANY_VALS", tkn)
            elif tkn.type == "LPARENTHESIS":
                if len(identifiers) == 0:
                    continue
                elif len(identifiers) == 1 and tkns[i - 1].type in [
                        "SPACE", "TAB"
                ]:
                    # "(" preceded by whitespace starts a parenthesized
                    # value and is accepted; "NAME(" with no space is not
                    # (presumably a function-like macro — confirm intent)
                    continue
                else:
                    context.new_error("PREPROC_CONSTANT", tkn)
            elif tkn.type in ["LBRACKET", "LBRACE"]:
                # Compound values (arrays, blocks) are not constants
                context.new_error("PREPROC_CONSTANT", tkn)

            i += 1
        # Any define in a header outside an open header guard (state 1)
        # violates the guard-wraps-everything rule
        if context.filetype == "h" and context.scope.header_protection != 1:
            context.new_error("HEADER_PROT_ALL", context.peek_token(0))
        return False, 0