Example #1
    def testTokenize(self):
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())
        grammar.add('word', re.compile(r'(?P<content>\w+) *'), WordComponent())

        lexer = lexers.Lexer()

        # Basic
        lexer.tokenize(root, grammar, u'foo bar')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertIsInstance(root(1), FooBar)
        self.assertEqual(root(1).content, u'bar')

        # Fall through
        root = tokens.Token(None)
        lexer.tokenize(root, grammar, u'foo other bar')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertIsInstance(root(1), tokens.Word)
        self.assertNotIsInstance(root(1), FooBar)
        self.assertEqual(root(1).content, u'other')
        self.assertIsInstance(root(2), FooBar)
        self.assertEqual(root(2).content, u'bar')
Example #2
    def testTokenizeWithExtraContent(self):
        # Extra
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())

        lexer = lexers.Lexer()
        lexer.tokenize(root, 'foo ???', None, grammar)
        self.assertEqual(root(0).name, 'FooBar')
        self.assertEqual(root(0)['content'], 'foo')
        self.assertEqual(root(1).name, 'ErrorToken')
        self.assertIn('Unprocessed', root(1)['message'])
Example #3
    def testTokenizeWithExtraContent(self):
        # Extra
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())

        lexer = lexers.Lexer()
        lexer.tokenize(root, grammar, u'foo ???')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertIsInstance(root(1), tokens.ErrorToken)
        self.assertIn('Unprocessed', root(1).message)
Example #4
    def testPatterns(self):
        """
        Test that multiple patterns can be added.

        NOTE: The underlying Storage object that the Grammar class uses is thoroughly tested
              in test/common/test_Storage.py.
        """
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(''), Proxy())
        grammar.add('bar', re.compile(''), Proxy())
        self.assertEqual(grammar[0].name, 'foo')
        self.assertEqual(grammar[1].name, 'bar')
        self.assertEqual(grammar['foo'].name, 'foo')
        self.assertEqual(grammar['bar'].name, 'bar')
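
The indexed and named lookups above rely on the Grammar's underlying Storage container preserving insertion order and supporting access by either position or name. Below is a minimal self-contained sketch of that behaviour (a toy illustration, not the MooseDocs Storage implementation; the class and attribute names are assumptions):

class NamedStorage(object):
    """Toy ordered container addressable by index or by name (assumed behaviour)."""
    def __init__(self):
        self._items = []   # values in insertion order
        self._names = {}   # name -> position in self._items

    def add(self, name, value):
        self._names[name] = len(self._items)
        self._items.append(value)

    def __getitem__(self, key):
        # String keys resolve through the name map; integers index directly.
        if isinstance(key, str):
            key = self._names[key]
        return self._items[key]

store = NamedStorage()
store.add('foo', 'first pattern')
store.add('bar', 'second pattern')
assert store[0] == store['foo'] == 'first pattern'
assert store[1] == store['bar'] == 'second pattern'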
Example #5
    def testGrammar(self):
        grammar = lexers.Grammar()

        with self.assertRaises(exceptions.MooseDocsException) as e:
            grammar.add(1, [], [], '_end')
        self.assertIn("'name' must be of type", e.exception.message)

        with self.assertRaises(exceptions.MooseDocsException) as e:
            grammar.add('foo', 1, [], '_end')
        self.assertIn("'regex' must be of type", e.exception.message)

        with self.assertRaises(exceptions.MooseDocsException) as e:
            grammar.add('foo', re.compile(''), 1, '_end')
        self.assertIn("'function' must be callable", e.exception.message)

        with self.assertRaises(exceptions.MooseDocsException) as e:
            grammar.add('foo', re.compile(''), Proxy(), [])
        self.assertIn("'location' must be of type", e.exception.message)
Example #6
    def testTokenize(self):
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())
        grammar.add('word', re.compile(r'(?P<content>\w+) *'), WordComponent())

        lexer = lexers.Lexer()

        # Basic
        lexer.tokenize(root, 'foo bar', None, grammar)
        self.assertEqual(root(0).name, 'FooBar')
        self.assertEqual(root(0)['content'], 'foo')
        self.assertEqual(root(1).name, 'FooBar')
        self.assertEqual(root(1)['content'], 'bar')

        # Fall through
        root = tokens.Token(None)
        lexer.tokenize(root, 'foo other bar', None, grammar)
        self.assertEqual(root(0).name, 'FooBar')
        self.assertEqual(root(0)['content'], 'foo')
        self.assertEqual(root(1).name, 'Word')
        self.assertEqual(root(1)['content'], 'other')
        self.assertEqual(root(2).name, 'FooBar')
        self.assertEqual(root(2)['content'], 'bar')
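
Examples #1 and #6 exercise the same fall-through behaviour against two versions of the Lexer API: each grammar rule in turn may claim the current match, a rule that declines lets later rules try, and text that no rule processes becomes an error token (Examples #2 and #3). Below is a minimal self-contained sketch of that pattern (a toy illustration, not the MooseDocs Lexer; every name and signature is an assumption made for clarity):

import re

class Token(object):
    """Toy token carrying a type name and the matched content."""
    def __init__(self, name, content):
        self.name = name
        self.content = content

def foo_bar_component(match):
    # Claims only the words 'foo' and 'bar'; returning None falls through.
    if match.group('content') in ('foo', 'bar'):
        return Token('FooBar', match.group('content'))
    return None

def word_component(match):
    # Accepts any word.
    return Token('Word', match.group('content'))

def tokenize(text, grammar):
    out, pos = [], 0
    while pos < len(text):
        for regex, component in grammar:
            match = regex.match(text, pos)
            token = component(match) if match else None
            if token is not None:
                out.append(token)
                pos = match.end()
                break
        else:
            # No rule produced a token: report the unprocessed remainder.
            out.append(Token('ErrorToken', 'Unprocessed text: ' + text[pos:]))
            break
    return out

grammar = [(re.compile(r'(?P<content>\w+) *'), foo_bar_component),
           (re.compile(r'(?P<content>\w+) *'), word_component)]

print([(t.name, t.content) for t in tokenize('foo other bar', grammar)])
# [('FooBar', 'foo'), ('Word', 'other'), ('FooBar', 'bar')]
print([(t.name, t.content) for t in tokenize('foo ???', grammar)])
# [('FooBar', 'foo'), ('ErrorToken', 'Unprocessed text: ???')]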