Example #1
    def testTokenize(self):
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())
        grammar.add('word', re.compile(r'(?P<content>\w+) *'), WordComponent())

        lexer = lexers.Lexer()

        # Basic
        lexer.tokenize(root, grammar, u'foo bar')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertIsInstance(root(1), FooBar)
        self.assertEqual(root(1).content, u'bar')

        # Fall through
        root = tokens.Token(None)
        lexer.tokenize(root, grammar, u'foo other bar')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertIsInstance(root(1), tokens.Word)
        self.assertNotIsInstance(root(1), FooBar)
        self.assertEqual(root(1).content, u'other')
        self.assertIsInstance(root(2), FooBar)
        self.assertEqual(root(2).content, u'bar')
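
This test relies on the grammar trying its patterns in order and on a component being able to decline a match so the next pattern handles the same text (the "fall through" case). A minimal, self-contained sketch of that behaviour is below; SimpleGrammar, tokenize, and the tuple tokens are illustrative stand-ins, not the lexers/tokens API.

import re

class SimpleGrammar:
    """Ordered list of (name, regex, handler) patterns."""
    def __init__(self):
        self.patterns = []

    def add(self, name, regex, handler):
        self.patterns.append((name, regex, handler))

def tokenize(grammar, text):
    out = []
    pos = 0
    while pos < len(text):
        for name, regex, handler in grammar.patterns:
            match = regex.match(text, pos)
            if match:
                token = handler(match)  # a handler may decline by returning None
                if token is not None:
                    out.append(token)
                    pos = match.end()
                    break
        else:
            break  # nothing matched; remaining text is left unprocessed
    return out

# 'foo'/'bar' are claimed by the first pattern; anything else falls through to 'word'.
grammar = SimpleGrammar()
grammar.add('foo', re.compile(r'(?P<content>\w+) *'),
            lambda m: ('FooBar', m.group('content')) if m.group('content') in ('foo', 'bar') else None)
grammar.add('word', re.compile(r'(?P<content>\w+) *'),
            lambda m: ('Word', m.group('content')))

print(tokenize(grammar, 'foo other bar'))
# [('FooBar', 'foo'), ('Word', 'other'), ('FooBar', 'bar')]
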
Example #2
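    # Assumes an @mock.patch decorator on this test (target omitted here) supplies the 'mock' argument.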
    def testTokenizeWithExtraContent(self, mock):
        # Extra
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())

        lexer = lexers.Lexer()
        lexer.tokenize(root, grammar, u'foo ???')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertEqual(mock.call_count, 1)
Example #3
    def testTokenizeWithExtraContent(self):
        # Extra
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())

        lexer = lexers.Lexer()
        lexer.tokenize(root, 'foo ???', None, grammar)
        self.assertEqual(root(0).name, 'FooBar')
        self.assertEqual(root(0)['content'], 'foo')
        self.assertEqual(root(1).name, 'ErrorToken')
        self.assertIn('Unprocessed', root(1)['message'])
Example #4
    def testTokenizeWithExtraContent(self):
        # Extra
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())

        lexer = lexers.Lexer()
        lexer.tokenize(root, grammar, u'foo ???')
        self.assertIsInstance(root(0), FooBar)
        self.assertEqual(root(0).content, u'foo')
        self.assertIsInstance(root(1), tokens.ErrorToken)
        self.assertIn('Unprocessed', root(1).message)
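
Examples #2-#4 check the case where no grammar pattern can consume the remaining input and the leftover is reported rather than silently dropped. The sketch below illustrates that idea with plain Python; the function name and the tuple-based error token are assumptions for illustration, not the library API.

import re

def tokenize_with_errors(patterns, text):
    """Tokenize text; wrap any unprocessed remainder in an error token."""
    out = []
    pos = 0
    while pos < len(text):
        for name, regex, handler in patterns:
            match = regex.match(text, pos)
            if match:
                out.append(handler(match))
                pos = match.end()
                break
        else:
            # No pattern accepted the remaining text: report it and stop.
            out.append(('ErrorToken', 'Unprocessed text: %r' % text[pos:]))
            break
    return out

# Only one pattern, so anything that is not a word goes straight to the error path.
patterns = [('foo', re.compile(r'(?P<content>\w+) *'),
             lambda m: ('FooBar', m.group('content')))]
print(tokenize_with_errors(patterns, 'foo ???'))
# [('FooBar', 'foo'), ('ErrorToken', "Unprocessed text: '???'")]
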
Example #5
    def testTokenize(self):
        root = tokens.Token(None)
        grammar = lexers.Grammar()
        grammar.add('foo', re.compile(r'(?P<content>\w+) *'), FooBarComponent())
        grammar.add('word', re.compile(r'(?P<content>\w+) *'), WordComponent())

        lexer = lexers.Lexer()

        # Basic
        lexer.tokenize(root, 'foo bar', None, grammar)
        self.assertEqual(root(0).name, 'FooBar')
        self.assertEqual(root(0)['content'], 'foo')
        self.assertEqual(root(1).name, 'FooBar')
        self.assertEqual(root(1)['content'], 'bar')

        # Fall through
        root = tokens.Token(None)
        lexer.tokenize(root, 'foo other bar', None, grammar)
        self.assertEqual(root(0).name, 'FooBar')
        self.assertEqual(root(0)['content'], 'foo')
        self.assertEqual(root(1).name, 'Word')
        self.assertEqual(root(1)['content'], 'other')
        self.assertEqual(root(2).name, 'FooBar')
        self.assertEqual(root(2)['content'], 'bar')