Example #1
    def restore_tokens(self, file: _io.BytesIO):
        """
        Restores the token list from a binary stream.

        Each record starts with a one-byte flag: flag 0 terminates the stream
        with an EOF token, and flags 1 to 4 select the token class to rebuild
        from the strings that follow.

        :param file: the binary stream to read tokens from
        """
        self.tokens.clear()
        while True:
            flag = int.from_bytes(file.read(1), "big")
            if flag == 0:
                self.tokens.append(stl.Token((stl.EOF, None)))
                break
            else:
                line = int(stl.read_string(file))
                file_name = stl.read_string(file)
                lf = line, file_name
                if flag == 1:
                    token: stl.NumToken = stl.NumToken(lf, stl.read_string(file))
                elif flag == 2:
                    token: stl.LiteralToken = stl.LiteralToken(lf, stl.read_string(file))
                elif flag == 3:
                    token: stl.IdToken = stl.IdToken(lf, stl.read_string(file))
                elif flag == 4:
                    token: stl.DocToken = stl.DocToken(lf, stl.read_string(file))
                else:
                    raise stl.ParseException("Unknown flag: {}".format(flag))
                self.tokens.append(token)
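
The binary layout consumed by restore_tokens is a one-byte flag followed by strings read with stl.read_string: the source line number (stored as a string), the file name, and the token payload, with flag 0 marking the end of the stream. The exact string encoding used by stl.read_string is not shown in this excerpt, so the sketch below uses a hypothetical length-prefixed UTF-8 encoding purely to illustrate the record layout; write_string and read_string here are stand-ins, not the project's helpers.

import io

def write_string(f, s: str):
    # Hypothetical helper: one length byte, then the UTF-8 bytes.
    data = s.encode("utf-8")
    f.write(len(data).to_bytes(1, "big"))
    f.write(data)

def read_string(f) -> str:
    # Mirror of write_string above.
    length = int.from_bytes(f.read(1), "big")
    return f.read(length).decode("utf-8")

# Serialize one NumToken record the way restore_tokens expects it:
# flag byte, line number as a string, file name, then the numeric literal,
# followed by the flag-0 terminator.
buf = io.BytesIO()
buf.write((1).to_bytes(1, "big"))      # flag 1 -> NumToken
write_string(buf, "42")                # line number
write_string(buf, "example.sp")        # file name (hypothetical)
write_string(buf, "3.14")              # numeric literal
buf.write((0).to_bytes(1, "big"))      # flag 0 -> EOF terminator

# Reading it back follows the same order restore_tokens uses.
buf.seek(0)
flag = int.from_bytes(buf.read(1), "big")
line, file_name, value = read_string(buf), read_string(buf), read_string(buf)
print(flag, line, file_name, value)    # 1 42 example.sp 3.14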
Example #2
    def tokenize_text(self, lines):
        """
        Tokenizes source code that is already split into a list of lines.

        :param lines: the lines of source code to tokenize
        """
        doc = ""
        in_doc = False
        for line_num, line in enumerate(lines, start=1):
            tup = (line_num, self.file_name)
            last_index = len(self.tokens)
            in_doc, doc = self.proceed_line(line, tup, in_doc, doc)
            self.find_import(last_index, len(self.tokens))

        self.tokens.append(stl.Token((stl.EOF, None)))
Example #3
    def tokenize_file(self, file: _io.TextIOWrapper):
        """
        Tokenizes source code read line by line from an open text file.

        :param file: the open text file to tokenize
        :return: None; the resulting tokens are appended to self.tokens
        """
        line = file.readline()
        line_num = 1
        in_doc = False
        doc = ""
        while line:
            tup = (line_num, self.file_name)
            last_index = len(self.tokens)
            in_doc, doc = self.proceed_line(line, tup, in_doc, doc)
            self.find_include(last_index, len(self.tokens))
            line = file.readline()
            line_num += 1

        self.tokens.append(stl.Token((stl.EOF, None)))
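
Taken together, tokenize_text and tokenize_file are two entry points over the same per-line machinery: the first works on lines already held in memory, the second streams them from an open file, and both finish by appending the EOF token. (They differ only in the per-line hook they invoke, find_import versus find_include, following the source.) Assuming a tokenizer instance named lex whose class wraps these methods (the class itself is not shown in this excerpt) and a hypothetical source file example.sp, the two calls below cover the same input:

with open("example.sp", "r") as f:
    lex.tokenize_file(f)

with open("example.sp", "r") as f:
    lex.tokenize_text(f.readlines())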