Example #1
    def testRecursiveInclude(self):
        lxr = lexer.Lexer(version=18, filename="file.p8")
        lxr.process_lines([
            b'#include tests/pico8/lua/ext_recursive.lua\n',
        ])

        self.assertEqual(8, len(lxr._tokens))
Example #2
 def testStringMultipleLinesPlusAToken(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'"abc def ghi \nand jkl" and\n')
     self.assertEqual(4, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'),
                      lxr._tokens[0])
     self.assertEqual(lexer.TokKeyword(b'and'), lxr._tokens[2])
Example #3
    def testBitwiseRotl(self):
        lxr = lexer.Lexer(version=4)
        lxr.process_lines([
            b'A <<> B',
        ])

        self.assertEqual(5, len(lxr._tokens))
Example #4
 def testTokenAndComment(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'and-- comment text and stuff\n')
     self.assertEqual(3, len(lxr._tokens))
     self.assertEqual(lexer.TokKeyword(b'and'), lxr._tokens[0])
     self.assertEqual(lexer.TokComment(b'-- comment text and stuff'),
                      lxr._tokens[1])
Example #5
 def testMultilineString(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'[[one\n')
     lxr._process_line(b'"two"\n')
     lxr._process_line(b'[[three]]\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'one\n"two"\n[[three'),
                      lxr._tokens[0])
Example #6
 def testMultilineStringMatchedEquals(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'[===[one\n')
     lxr._process_line(b'[[two]]\n')
     lxr._process_line(b'[==[three]==]]===]\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'one\n[[two]]\n[==[three]==]'),
                      lxr._tokens[0])
Example #7
 def testMultilineCommentMultipleCalls(self):
     lxr = lexer.Lexer(version=8)
     lxr._process_line(b'--[[comment text\n')
     lxr._process_line(b'and "stuff\n')
     lxr._process_line(b']]\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokComment(b'--[[comment text\nand "stuff\n]]'),
                      lxr._tokens[0])
Example #8
 def testTokensProperty(self):
     lxr = lexer.Lexer(version=4)
     lxr.process_lines([
         b'function foo()\n',
         b'  return 999\n',
         b'end\n'
     ])
     self.assertEqual(13, len(lxr.tokens))
Example #9
 def testNegatedNumber(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'-1.234567890e-6\n')
     self.assertEqual(3, len(lxr._tokens))
     self.assertEqual(lexer.TokSymbol(b'-'),
                      lxr._tokens[0])
     self.assertEqual(lexer.TokNumber(b'1.234567890e-6'),
                      lxr._tokens[1])
Example #10
 def testTokenMatches(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'break')
     self.assertEqual(1, len(lxr._tokens))
     self.assertTrue(lxr._tokens[0].matches(lexer.TokKeyword(b'break')))
     self.assertTrue(lxr._tokens[0].matches(lexer.TokKeyword))
     self.assertFalse(lxr._tokens[0].matches(lexer.TokKeyword(b'and')))
     self.assertFalse(lxr._tokens[0].matches(lexer.TokSpace))
Example #11
 def testProcessLines(self):
     lxr = lexer.Lexer(version=4)
     lxr.process_lines([
         b'function foo()\n',
         b'  return 999\n',
         b'end\n'
     ])
     self.assertEqual(13, len(lxr._tokens))
Example #12
 def testLexerError(self):
     lxr = lexer.Lexer(version=4)
     try:
         lxr._process_line(b'123 @ 456')
         self.fail()
     except lexer.LexerError as e:
         txt = str(e)  # coverage test
         self.assertEqual(1, e.lineno)
         self.assertEqual(5, e.charno)
Example #13
 def testProcessLinesErrorOnOpenMultilineString(self):
     lxr = lexer.Lexer(version=4)
     self.assertRaises(
         lexer.LexerError,
         lxr.process_lines,
         [
             b'[[one\n',
             b'two\n'
         ])
Example #14
 def testStringMultipleLines(self):
     # TODO: Pico-8 doesn't allow multiline strings, so this probably
     # shouldn't either.
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'"abc def ghi \n')
     lxr._process_line(b'and jkl"\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'),
                      lxr._tokens[0])
Example #15
 def testHelloWorldExample(self):
     code=b'-- hello world\n-- by zep\n\nt = 0\n\nmusic(0)\n\nfunction _update()\n t += 1\nend\n\nfunction _draw()\n cls()\n  \n for i=1,11 do\n  for j0=0,7 do\n  j = 7-j0\n  col = 7+j\n  t1 = t + i*4 - j*2\n  x = cos(t0)*5\n  y = 38 + j + cos(t1/50)*5\n  pal(7,col)\n  spr(16+i, 8+i*8 + x, y)\n  end\n end\n \n  print("this is pico-8",\n    37, 70, 14) --8+(t/4)%8)\n\n print("nice to meet you",\n    34, 80, 12) --8+(t/4)%8)\n\n  spr(1, 64-4, 90)\nend\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'
     lxr = lexer.Lexer(version=4)
     lxr.process_lines([code])
     tokens = lxr.tokens
     self.assertEqual(lexer.TokComment(b'-- hello world'), tokens[0])
     self.assertEqual(lexer.TokNewline(b'\n'), tokens[1])
     self.assertEqual(lexer.TokComment(b'-- by zep'), tokens[2])
     self.assertEqual(lexer.TokNewline(b'\n'), tokens[3])
Example #16
    def testPeekOperators(self):
        lxr = lexer.Lexer(version=28)
        lxr.process_lines([
            b"@A\n",
            b"%A\n",
            b"$A\n",
        ])

        self.assertEqual(9, len(lxr._tokens))
Example #17
    def testSpecialSymbols(self):
        lxr = lexer.Lexer(version=18)
        lxr.process_lines([
            bytes(
                u'{…,∧,░,➡,⧗,▤,⬆,☉,🅾,◆,█,★,⬇,✽,●,♥,웃,⌂,⬅,▥,❎,🐱,ˇ,▒,♪,😐}',
                'utf-8')
        ])

        # 26 characters + 25 commas + 2 braces
        self.assertEqual(53, len(lxr._tokens))
Example #18
 def testValidLuaNoErrors(self):
     lxr = lexer.Lexer(version=4)
     for line in VALID_LUA.split(b'\n'):
         lxr._process_line(line)
     tokens = lxr.tokens
     self.assertEqual(lexer.TokName(b'v1'), tokens[0])
     self.assertEqual(lexer.TokSpace(b' '), tokens[1])
     self.assertEqual(lexer.TokSymbol(b'='), tokens[2])
     self.assertEqual(lexer.TokSpace(b' '), tokens[3])
     self.assertEqual(lexer.TokKeyword(b'nil'), tokens[4])
Example #19
File: tool.py  Project: greay/p8advent
def main(orig_args):
    arg_parser = _get_argparser()
    args = arg_parser.parse_args(args=orig_args)

    assert args.lua.endswith('.lua')
    game_fname = args.lua[:-len('.lua')] + '.p8'

    my_game = game.Game.make_empty_game(filename=game_fname)
    my_lexer = lexer.Lexer(version=4)
    with open(args.lua, 'rb') as lua_fh:
        my_lexer.process_lines(lua_fh)

    my_textlib = lzwlib.LzwLib(start_addr=args.startaddr,
                               end_addr=args.endaddr)

    # When a string token follows a '*' marker symbol, replace the string
    # with its text-library id and blank out the marker token.
    saw_star = False
    for i, token in enumerate(my_lexer._tokens):
        if token.matches(lexer.TokSymbol(b'*')):
            saw_star = True
        elif token.matches(lexer.TokString) and saw_star:
            sid = my_textlib.id_for_string(token.value.decode())
            my_lexer._tokens[i - 1] = lexer.TokSpace(b'')
            my_lexer._tokens[i] = lexer.TokString(str.encode(sid))
            saw_star = False
        else:
            saw_star = False

    textlib_lua = str.encode(my_textlib.generate_lua())
    my_lexer.process_lines([(l + b'\n') for l in textlib_lua.split(b'\n')])

    my_game.lua._lexer = my_lexer
    my_game.lua._parser.process_tokens(my_game.lua._lexer.tokens)

    text_bytes = my_textlib.as_bytes()
    my_game.write_cart_data(text_bytes, args.startaddr)

    with open(game_fname, 'wb') as outstr:
        my_game.to_p8_file(outstr, filename=game_fname)

    return 0
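
A minimal entry point for invoking main (a sketch; the original tool.py may wire up sys.argv differently):

import sys

if __name__ == '__main__':
    # Pass everything after the script name through to the arg parser.
    sys.exit(main(sys.argv[1:]))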
Example #20
def get_tokens(s):
    lxr = lexer.Lexer(version=4)
    lxr.process_lines([(l + b'\n') for l in s.split(b'\n')])
    return lxr.tokens
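
A quick usage sketch for this helper, assuming a bytes source string as in the other examples; note that the tokens property includes whitespace tokens (see Example #8):

tokens = get_tokens(b'x = 1')
assert tokens[0].matches(lexer.TokName(b'x'))
assert tokens[4].matches(lexer.TokNumber(b'1'))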
Example #21
 def testNumberInteger(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'1234567890\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokNumber(b'1234567890'),
                      lxr._tokens[0])
Example #22
 def testStringEscapes(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'"\\\n\\a\\b\\f\\n\\r\\t\\v\\\\\\"\\\'\\65"\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'\n\a\b\f\n\r\t\v\\"\'A'),
                      lxr._tokens[0])
Example #23
 def testStringSingleQuotes(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b"'abc def ghi and jkl'\n")
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi and jkl'),
                      lxr._tokens[0])
Example #24
 def testProcessLinesErrorOnOpenString(self):
     lxr = lexer.Lexer(version=4)
     self.assertRaises(
         lexer.LexerError,
         lxr.process_lines,
         [b'"one'])
Example #25
 def testComment(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'-- comment text and stuff\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokComment(b'-- comment text and stuff'),
                      lxr._tokens[0])
Example #26
 def testCommentUnofficialDoubleSlash(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'// comment text and stuff\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokComment(b'// comment text and stuff'),
                      lxr._tokens[0])
Example #27
 def testMultilineCommentNoLinebreaks(self):
     lxr = lexer.Lexer(version=8)
     lxr._process_line(b'--[[comment text and "stuff]]\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokComment(b'--[[comment text and "stuff]]'),
                      lxr._tokens[0])
Example #28
 def testNumberDecimalNoRightPart(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'1.\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokNumber(b'1.'),
                      lxr._tokens[0])
Example #29
 def testNumberDecimalWithExp(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'1.234567890e-6\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokNumber(b'1.234567890e-6'),
                      lxr._tokens[0])
Example #30
 def testNumberHexWithFrac(self):
     lxr = lexer.Lexer(version=4)
     lxr._process_line(b'0x1234567890abcdef.1bbf\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokNumber(b'0x1234567890abcdef.1bbf'),
                      lxr._tokens[0])