Example #1
0
 def testStringMultipleLinesPlusAToken(self):
     """A quoted string spanning a newline lexes as one token, with a keyword after it."""
     lex = lexer.Lexer(version=4)
     lex._process_line(b'"abc def ghi \nand jkl" and\n')
     tokens = lex._tokens
     self.assertEqual(4, len(tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'), tokens[0])
     self.assertEqual(lexer.TokKeyword(b'and'), tokens[2])
Example #2
0
 def testMultilineStringMatchedEquals(self):
     """A [===[ ... ]===] string only closes on the delimiter with a matching equals count."""
     lex = lexer.Lexer(version=4)
     for raw_line in (b'[===[one\n',
                      b'[[two]]\n',
                      b'[==[three]==]]===]\n'):
         lex._process_line(raw_line)
     tokens = lex._tokens
     self.assertEqual(2, len(tokens))
     self.assertEqual(lexer.TokString(b'one\n[[two]]\n[==[three]==]'), tokens[0])
Example #3
0
 def testMultilineString(self):
     """Inside a [[ ... ]] string, quotes and nested brackets are taken literally."""
     lex = lexer.Lexer(version=4)
     for raw_line in (b'[[one\n', b'"two"\n', b'[[three]]\n'):
         lex._process_line(raw_line)
     tokens = lex._tokens
     self.assertEqual(2, len(tokens))
     self.assertEqual(lexer.TokString(b'one\n"two"\n[[three'), tokens[0])
Example #4
0
 def testStringMultipleLines(self):
     """A quoted string continues across _process_line calls until the closing quote."""
     # TODO: Pico-8 doesn't allow multiline strings, so this probably
     # shouldn't either.
     lex = lexer.Lexer(version=4)
     for raw_line in (b'"abc def ghi \n', b'and jkl"\n'):
         lex._process_line(raw_line)
     tokens = lex._tokens
     self.assertEqual(2, len(tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi \nand jkl'), tokens[0])
Example #5
0
def main(orig_args):
    """Compress starred string literals in a .lua file into an LZW text library.

    Lexes the input .lua file, replaces every string literal immediately
    preceded by a `*` symbol with a short id from the text library, appends
    the library's lookup code, and writes the result as a .p8 cart.

    Args:
      orig_args: Command-line arguments (list of str), parsed by _get_argparser.

    Returns:
      0 on success.
    """
    args = _get_argparser().parse_args(args=orig_args)

    # The output cart name is derived from the input name: foo.lua -> foo.p8.
    assert args.lua.endswith('.lua')
    game_fname = args.lua[:-len('.lua')] + '.p8'

    my_game = game.Game.make_empty_game(filename=game_fname)
    my_lexer = lexer.Lexer(version=4)
    with open(args.lua, 'rb') as lua_fh:
        my_lexer.process_lines(lua_fh)

    my_textlib = lzwlib.LzwLib(start_addr=args.startaddr,
                               end_addr=args.endaddr)

    # Replace each `*"literal"` pair: blank out the star, swap the string
    # for its library id. The flag only survives one token, so the star
    # must immediately precede the string.
    pending_star = False
    for idx, tok in enumerate(my_lexer._tokens):
        if tok.matches(lexer.TokSymbol(b'*')):
            pending_star = True
            continue
        if tok.matches(lexer.TokString) and pending_star:
            string_id = my_textlib.id_for_string(tok.value.decode())
            my_lexer._tokens[idx - 1] = lexer.TokSpace(b'')
            my_lexer._tokens[idx] = lexer.TokString(str.encode(string_id))
        pending_star = False

    # Append the generated lookup code so the cart can decode ids at runtime.
    textlib_lua = str.encode(my_textlib.generate_lua())
    my_lexer.process_lines([(l + b'\n') for l in textlib_lua.split(b'\n')])

    my_game.lua._lexer = my_lexer
    my_game.lua._parser.process_tokens(my_game.lua._lexer.tokens)

    # Store the compressed text bytes in cart data at the configured address.
    my_game.write_cart_data(my_textlib.as_bytes(), args.startaddr)

    with open(game_fname, 'wb') as outstr:
        my_game.to_p8_file(outstr, filename=game_fname)

    return 0
Example #6
0
 def testArgsString(self):
     """_args() accepts a lone string literal as the argument list."""
     psr = get_parser('"string literal"')
     result = psr._args()
     self.assertIsNotNone(result)
     self.assertEqual(1, psr._pos)
     self.assertTrue(result.matches(lexer.TokString('string literal')))
Example #7
0
 def testExpValueString(self):
     """_exp() parses a string literal into an ExpValue node wrapping the token."""
     psr = get_parser('"string literal"')
     result = psr._exp()
     self.assertIsNotNone(result)
     self.assertTrue(isinstance(result, parser.ExpValue))
     self.assertTrue(result.value.matches(lexer.TokString('string literal')))
Example #8
0
 def testStringEscapes(self):
     """Each supported backslash escape decodes to its corresponding character."""
     lex = lexer.Lexer(version=4)
     lex._process_line(b'"\\\n\\a\\b\\f\\n\\r\\t\\v\\\\\\"\\\'\\65"\n')
     tokens = lex._tokens
     self.assertEqual(2, len(tokens))
     self.assertEqual(lexer.TokString(b'\n\a\b\f\n\r\t\v\\"\'A'), tokens[0])
Example #9
0
 def testStringSingleQuotes(self):
     """A single-quoted string lexes to one TokString token."""
     lex = lexer.Lexer(version=4)
     lex._process_line(b"'abc def ghi and jkl'\n")
     tokens = lex._tokens
     self.assertEqual(2, len(tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi and jkl'), tokens[0])
Example #10
0
 def testStringDoubleQuotes(self):
     """A double-quoted string lexes to one TokString token."""
     lxr = lexer.Lexer(version=4)
     # Feed bytes and expect a bytes TokString, consistent with the other
     # lexer tests (e.g. testStringSingleQuotes) — the lexer is driven with
     # bytes input everywhere else in this suite.
     lxr._process_line(b'"abc def ghi and jkl"\n')
     self.assertEqual(2, len(lxr._tokens))
     self.assertEqual(lexer.TokString(b'abc def ghi and jkl'),
                      lxr._tokens[0])