def test_extensions_checks_location(self):
    """read_extensions rejects an extension used outside its declared locations."""
    class LocationExtension(types.Extension):
        name = 'Foo'
        kind = types.ExtensionKind.NO_ARGS
        locations = [types.ExtensionLocation.MEMBER]

    parser = idl_parser.IdlParser([LocationExtension])

    # A single non-matching location is rejected.
    with self.assertRaises(SyntaxError):
        parser._reader = idl_tokenizer.IdlTokenizer('', '[Foo]')
        parser.read_extensions([types.ExtensionLocation.DEFINITION])

    # Several locations, none of which match, are rejected too.
    with self.assertRaises(SyntaxError):
        parser._reader = idl_tokenizer.IdlTokenizer('', '[Foo]')
        parser.read_extensions([
            types.ExtensionLocation.MIXIN_MEMBER,
            types.ExtensionLocation.TYPE,
        ])

    # So long as one matches it should work.
    parser._reader = idl_tokenizer.IdlTokenizer('', '[Foo]')
    results = parser.read_extensions(
        [types.ExtensionLocation.MEMBER, types.ExtensionLocation.TYPE])
    self.assertEqual(len(results), 1)
    self.assertExtensionValue(results[0], LocationExtension)
# Example #2
 def test_extensions_ident(self):
   """An Ident extension captures its single identifier argument."""
   parser = idl_parser.IdlParser(ALL_EXTENSIONS)
   parser._reader = idl_tokenizer.IdlTokenizer('', '[Ident=Something]')
   locations = [idl_parser.IdlExtensionLocation.DEFINITION]
   extensions = parser.read_extensions(locations)
   self.assertEqual(len(extensions), 1)
   self.assertExtensionValue(extensions[0], IdentExtension, arg='Something')
# Example #3
  def test_extensions_ident_list(self):
    """An IdentList extension accepts both list and single-identifier forms."""
    parser = idl_parser.IdlParser(ALL_EXTENSIONS)
    locations = [idl_parser.IdlExtensionLocation.DEFINITION]

    # Parenthesized form carrying multiple identifiers.
    parser._reader = idl_tokenizer.IdlTokenizer(
        '', '[IdentList=(First, Second)]')
    extensions = parser.read_extensions(locations)
    self.assertEqual(len(extensions), 1)
    self.assertExtensionValue(
        extensions[0], IdentListExtension, args=['First', 'Second'])

    # An identifier list can be specified as a single identifier too.
    parser._reader = idl_tokenizer.IdlTokenizer('', '[IdentList=First]')
    extensions = parser.read_extensions(locations)
    self.assertEqual(len(extensions), 1)
    self.assertExtensionValue(extensions[0], IdentListExtension, args=['First'])
 def test_extensions_syntax_error(self):
     """Malformed extension syntax raises SyntaxError for every bad snippet."""
     for snippet in (
         '[]',
         '[IdentList=',
         '[IdentList=]',
         '[IdentList=optional]',
         '[IdentList=()]',
         '[IdentList=(Foo,]',
         '[IdentList=(Foo,,)]',
         '[IdentList=(Foo Bar)]',
         '[IdentList=(Foo]',
         '[NoArgs,]',
         '[NoArgs,,]',
         '[NoArgs NoArgs]',
         '[NoArgs=foo]',
         '[ArgList=Foo()]',
         '[Ident=(Foo, Bar)]',
         '[IdentList(int i)]',
         '[NamedArgList]',
     ):
         with self.assertRaises(SyntaxError):
             # Fresh parser per snippet so earlier failures can't leak state.
             parser = idl_parser.IdlParser(ALL_EXTENSIONS)
             parser._reader = idl_tokenizer.IdlTokenizer('', snippet)
             parser.read_extensions([types.ExtensionLocation.DEFINITION])
# Example #5
 def test_extensions_no_args(self):
   """A NoArgs extension parses with no arguments attached."""
   parser = idl_parser.IdlParser(ALL_EXTENSIONS)
   parser._reader = idl_tokenizer.IdlTokenizer('', '[NoArgs]')
   extensions = parser.read_extensions(
       [idl_parser.IdlExtensionLocation.DEFINITION])
   self.assertEqual(len(extensions), 1)
   self.assertExtensionValue(extensions[0], NoArgsExtension)
# Example #6
 def test_extensions_named_arg_list(self):
   """A NamedArgList extension records its name and an empty argument list."""
   parser = idl_parser.IdlParser(ALL_EXTENSIONS)
   parser._reader = idl_tokenizer.IdlTokenizer('', '[NamedArgList=Foo()]')
   extensions = parser.read_extensions(
       [idl_parser.IdlExtensionLocation.DEFINITION])
   self.assertEqual(len(extensions), 1)
   self.assertExtensionValue(
       extensions[0], NamedArgListExtension, argsName='Foo', args=[])
    def test_reads_strings(self):
        """String literals tokenize with their unquoted values."""
        reader = idl_tokenizer.IdlTokenizer('file', '"foobar" "baz" ""')
        for expected in ('foobar', 'baz', ''):
            token = reader.next()
            self.assertEqual(token.type,
                             idl_tokenizer.IdlTokenType.STRING_LITERAL)
            self.assertEqual(token.value, expected)

        # Unterminated strings and strings spanning a newline are errors.
        for snippet in ('"foo', '"foo\nbar"'):
            reader = idl_tokenizer.IdlTokenizer('file', snippet)
            with self.assertRaises(SyntaxError):
                reader.next()
 def test_extensions_multiples(self):
     """Comma-separated extensions each parse into their own result."""
     parser = idl_parser.IdlParser(ALL_EXTENSIONS)
     parser._reader = idl_tokenizer.IdlTokenizer(
         '', '[NoArgs, Ident=first, Ident=second]')
     extensions = parser.read_extensions([types.ExtensionLocation.DEFINITION])
     self.assertEqual(len(extensions), 3)
     self.assertExtensionValue(extensions[0], NoArgsExtension)
     self.assertExtensionValue(extensions[1], IdentExtension, arg='first')
     self.assertExtensionValue(extensions[2], IdentExtension, arg='second')
 def test_raises_syntax_error(self):
     """Unterminated comments and stray symbols raise SyntaxError."""
     for snippet in ('/* foobar ', '/*/', '/& foobar ', '/; foobar ', '$',
                     '@', '!', '%', '^'):
         reader = idl_tokenizer.IdlTokenizer('file', snippet)
         with self.assertRaises(SyntaxError):
             reader.next()
    def test_reads_numbers(self):
        """Numeric literals of every supported form tokenize to their values."""
        reader = idl_tokenizer.IdlTokenizer(
            'file',
            '1234 0x777 0XabcDEF 3e5;7e-3 -999 -0x345 .123 0 0123 NaN Infinity '
            '-Infinity')

        def expect_number(expected):
            token = reader.next()
            self.assertEqual(token.type,
                             idl_tokenizer.IdlTokenType.NUMBER_LITERAL)
            if math.isnan(expected):
                # NaN != NaN, so it needs a dedicated check.
                self.assertTrue(math.isnan(token.value))
            else:
                self.assertEqual(token.value, expected)

        for expected in (1234, 0x777, 0xabcdef, 3e5):
            expect_number(expected)
        # The ';' between 3e5 and 7e-3 must surface as its own token.
        self.assertEqual(reader.next().type,
                         idl_tokenizer.IdlTokenType.SEMI_COLON)
        for expected in (7e-3, -999, -0x345, .123, 0, 0o123, float('nan'),
                         float('inf'), float('-inf')):
            expect_number(expected)

        # Malformed numbers raise on the very first token read.
        for snippet in ('--2', '3-3', '5abc', '093', '3ee34', '3.4.2', '3x52',
                        '24yz'):
            reader = idl_tokenizer.IdlTokenizer('file', snippet)
            with self.assertRaises(SyntaxError):
                reader.next()
 def test_allows_number_prefixes_in_identifier(self):
     """Identifiers merely starting with 'Infinity'/'NaN' stay identifiers."""
     # This ensures the tokenizer doesn't just look at the next few characters.
     reader = idl_tokenizer.IdlTokenizer(
         '', 'InfinityFoo Infinity_Bar Infinity;NaNFoo')
     expected_types = [
         idl_tokenizer.IdlTokenType.IDENTIFIER,
         idl_tokenizer.IdlTokenType.IDENTIFIER,
         idl_tokenizer.IdlTokenType.NUMBER_LITERAL,
         idl_tokenizer.IdlTokenType.SEMI_COLON,
         idl_tokenizer.IdlTokenType.IDENTIFIER,
     ]
     for expected in expected_types:
         self.assertEqual(reader.next().type, expected)
# Example #12
 def test_extensions_arg_list(self):
   """An ArgList extension parses a full (int x, optional long y) arg list."""
   parser = idl_parser.IdlParser(ALL_EXTENSIONS)
   parser._reader = idl_tokenizer.IdlTokenizer(
       '', '[ArgList(int x, optional long y)]')
   extensions = parser.read_extensions(
       [idl_parser.IdlExtensionLocation.DEFINITION])
   self.assertEqual(len(extensions), 1)

   def make_arg(name, type_name, optional):
     # Helper: build a non-variadic IdlArgument with no default value.
     return idl_parser.IdlArgument(
         name=name, optional=optional, default=None, is_variadic=False,
         type=idl_parser.IdlType(
             name=type_name, nullable=False, element_type=None))

   expected = [
       make_arg('x', 'int', False),
       make_arg('y', 'long', True),
   ]
   self.assertExtensionValue(extensions[0], ArgListExtension, args=expected)
 def test_reads_special_chars(self):
     """Each punctuation character maps to its dedicated token type."""
     reader = idl_tokenizer.IdlTokenizer('file', '{}(),;?<>')
     expected_types = [
         idl_tokenizer.IdlTokenType.BEGIN_INTERFACE,
         idl_tokenizer.IdlTokenType.END_INTERFACE,
         idl_tokenizer.IdlTokenType.BEGIN_ARGS,
         idl_tokenizer.IdlTokenType.END_ARGS,
         idl_tokenizer.IdlTokenType.COMMA,
         idl_tokenizer.IdlTokenType.SEMI_COLON,
         idl_tokenizer.IdlTokenType.NULLABLE,
         idl_tokenizer.IdlTokenType.BEGIN_TEMPLATE,
         idl_tokenizer.IdlTokenType.END_TEMPLATE,
     ]
     for expected in expected_types:
         self.assertEqual(reader.next().type, expected)
     # Exhausted input yields None.
     self.assertIs(reader.next(), None)
    def test_arg_list_syntax_error(self):
        """Malformed argument lists raise SyntaxError from read_arg_list."""
        for snippet in (
            '(',
            '(int foo',
            '(int foo,)',
            '(optional foo)',
            '(int int foo)',
            '(int optional foo)',
            '(int... foo, int bar)',
            '(int foo = 1)',
            '(optional int foo = bar)',
            '(optional int foo =)',
            '(optional foo... bar)',
            '(optional int foo, int bar)',
        ):
            with self.assertRaises(SyntaxError):
                # Fresh parser per snippet so earlier failures can't leak state.
                parser = idl_parser.IdlParser()
                parser._reader = idl_tokenizer.IdlTokenizer('', snippet)
                parser.read_arg_list()
    def test_reads_identifiers(self):
        """Keywords and identifiers (including '-') tokenize as expected."""
        reader = idl_tokenizer.IdlTokenizer(
            'file', 'dictionary foo bar2s;foo foo-bar')
        self.assertEqual(reader.next().type,
                         idl_tokenizer.IdlTokenType.DICTIONARY)

        def expect_identifier(expected):
            token = reader.next()
            self.assertEqual(token.type, idl_tokenizer.IdlTokenType.IDENTIFIER)
            self.assertEqual(token.value, expected)

        expect_identifier('foo')
        expect_identifier('bar2s')
        self.assertEqual(reader.next().type,
                         idl_tokenizer.IdlTokenType.SEMI_COLON)
        expect_identifier('foo')
        expect_identifier('foo-bar')
    def test_reads_comments(self):
        """Only jsdoc-style comments (/** */) attach to tokens as docs."""
        reader = idl_tokenizer.IdlTokenizer('file', '/** Comments */ foo')
        token = reader.next()
        self.assertEqual(token.type, idl_tokenizer.IdlTokenType.IDENTIFIER)
        self.assertEqual(token.value, 'foo')
        self.assertEqual(token.doc, '/** Comments */')

        # Only handles jsdoc comments: /** */
        for code in ('/* Comments */ foo', '// Comments\nfoo'):
            reader = idl_tokenizer.IdlTokenizer('file', code)
            self.assertIs(reader.next().doc, None)

        # '/**/' carries no doc text.
        reader = idl_tokenizer.IdlTokenizer('file', '/**/ foo')
        self.assertIs(reader.peek().doc, None)
        self.assertEqual(reader.peek().value, 'foo')
        # '/*/' does not terminate the comment it opens.
        reader = idl_tokenizer.IdlTokenizer('file', '/*/ foo */ end')
        self.assertEqual(reader.next().value, 'end')

        # Preserves leading whitespace.
        reader = idl_tokenizer.IdlTokenizer(
            'file', '/* first */\n  /**\n   * foo\n   */ bar')
        self.assertEqual(reader.next().doc, '  /**\n   * foo\n   */')
 def _do_parse(body):
     # Parse `body` as an IDL argument list using a fresh parser.
     # NOTE(review): no `self`/`cls` parameter — presumably a module-level
     # helper or wrapped as a @staticmethod; confirm at the call site.
     parser = idl_parser.IdlParser()
     parser._reader = idl_tokenizer.IdlTokenizer('', body)
     return parser.read_arg_list()
 def test_ignores_escapes_in_strings(self):
     """Backslash escapes pass through string literals uninterpreted."""
     token = idl_tokenizer.IdlTokenizer('file', '"foo\\nbar"').next()
     self.assertEqual(token.type, idl_tokenizer.IdlTokenType.STRING_LITERAL)
     self.assertEqual(token.value, 'foo\\nbar')