Example #1
 def test_should_return_right_square_bracket_token(self):
     with closing(io.StringIO('{{]')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.PRINT_OPEN),
             Token(Lexem.RIGHT_SQUARE_BRACKET)
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
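These examples all follow the same pattern: the template source is wrapped in io.StringIO, fed to Tokenizer, and the emitted tokens are compared against an expected list. Each snippet is a method of a unittest.TestCase subclass. A minimal scaffold they could run under is sketched below; the import path template_tokenizer and the body of assertTokenListEqual are assumptions inferred from usage here, not the project's actual code:

 import io
 import unittest
 from contextlib import closing

 # Assumed import path; the project's real module layout may differ.
 from template_tokenizer import Lexem, ParserSyntaxError, Token, Tokenizer


 class TokenizerTest(unittest.TestCase):

     def assertTokenListEqual(self, expected, actual):
         # Hypothetical helper inferred from the calls in these examples:
         # compare the token streams element-wise via Token equality.
         self.assertEqual(list(expected), list(actual))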
Example #2
 def test_should_return_identifier_token(self):
     with closing(io.StringIO('{%identifier')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.STATEMENT_OPEN),
             Token(Lexem.IDENTIFIER, 'identifier')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #3
 def test_should_return_statement_tokens(self):
     with closing(io.StringIO('{%%}')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.STATEMENT_OPEN),
             Token(Lexem.STATEMENT_CLOSE)
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #4
 def test_should_raise_on_incomplete_unrecognised_construction(self):
     with closing(io.StringIO('<span>{{^2 + 1}}</span>')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         self.assertEqual(Token(Lexem.HTML, '<span>'),
                          tokenizer.get_next_token())
         self.assertEqual(Token(Lexem.PRINT_OPEN),
                          tokenizer.get_next_token())
         self.assertRaises(ParserSyntaxError, tokenizer.get_next_token)
Example #5
 def test_should_return_boolean_false(self):
     with closing(io.StringIO('{{ False }}')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.PRINT_OPEN),
             Token(Lexem.FALSE),
             Token(Lexem.PRINT_CLOSE)
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #6
 def test_should_omit_commented_text(self):
     with closing(io.StringIO(
             '<span>{#Comment {{12.3}}#}</span>')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.HTML, '<span>'),
             Token(Lexem.HTML, '</span>')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #7
 def test_should_return_string_token(self):
     with closing(io.StringIO('{{"Hello World"}}')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.PRINT_OPEN),
             Token(Lexem.STRING, 'Hello World'),
             Token(Lexem.PRINT_CLOSE)
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #8
 def test_should_omit_whitespaces(self):
     with closing(io.StringIO('<span>{% \n %}</span>')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.HTML, '<span>'),
             Token(Lexem.STATEMENT_OPEN),
             Token(Lexem.STATEMENT_CLOSE),
             Token(Lexem.HTML, '</span>')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #9
 def test_should_return_print_tokens(self):
     with closing(io.StringIO('<span>{{}}</span>')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.HTML, '<span>'),
             Token(Lexem.PRINT_OPEN),
             Token(Lexem.PRINT_CLOSE),
             Token(Lexem.HTML, '</span>')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #10
 def test_should_return_token_list_without_whitespaces(self):
     with closing(
             io.StringIO(
                 '<span>{% if condition %}Hello World{% endif %}</span>')
     ) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.HTML, '<span>'),
             Token(Lexem.STATEMENT_OPEN),
             Token(Lexem.IF),
             Token(Lexem.IDENTIFIER, 'condition'),
             Token(Lexem.STATEMENT_CLOSE),
             Token(Lexem.HTML, 'Hello World'),
             Token(Lexem.STATEMENT_OPEN),
             Token(Lexem.ENDIF),
             Token(Lexem.STATEMENT_CLOSE),
             Token(Lexem.HTML, '</span>')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #11
 def test_should_return_string_tokens(self):
     with closing(
             io.StringIO(
                 '<span>{{\'str1{{1+2}}b\'"str2\'/{#%"""\'\'}}</span>')
     ) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.HTML, '<span>'),
             Token(Lexem.PRINT_OPEN),
             Token(Lexem.STRING, 'str1{{1+2}}b'),
             Token(Lexem.STRING, 'str2\'/{#%'),
             Token(Lexem.STRING, ''),
             Token(Lexem.STRING, ''),
             Token(Lexem.PRINT_CLOSE),
             Token(Lexem.HTML, '</span>')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #12
 def test_should_return_single_html_token(self):
     with closing(io.StringIO('<span>Hello World</span>')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [Token(Lexem.HTML, '<span>Hello World</span>')]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #13
 def test_should_return_expression_tokens(self):
     with closing(io.StringIO(
             '<span>{% set var = (2+4%3.0) %}</span>')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [
             Token(Lexem.HTML, '<span>'),
             Token(Lexem.STATEMENT_OPEN),
             Token(Lexem.SET),
             Token(Lexem.IDENTIFIER, 'var'),
             Token(Lexem.ASSIGN),
             Token(Lexem.LEFT_BRACKET),
             Token(Lexem.INT, 2),
             Token(Lexem.PLUS),
             Token(Lexem.INT, 4),
             Token(Lexem.MOD),
             Token(Lexem.NUMBER, 3.0),
             Token(Lexem.RIGHT_BRACKET),
             Token(Lexem.STATEMENT_CLOSE),
             Token(Lexem.HTML, '</span>')
         ]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #14
 def test_should_return_left_bracket_token(self):
     with closing(io.StringIO('{{(')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [Token(Lexem.PRINT_OPEN), Token(Lexem.LEFT_BRACKET)]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #15
 def test_should_return_greater_or_equal_token(self):
     with closing(io.StringIO('{{>=')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [Token(Lexem.PRINT_OPEN), Token(Lexem.GE)]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #16
 def test_should_return_minus_token(self):
     with closing(io.StringIO('{{-')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [Token(Lexem.PRINT_OPEN), Token(Lexem.MINUS)]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
Example #17
 def test_should_return_keyword_token(self):
     with closing(io.StringIO('{%macro')) as input_stream:
         tokenizer = Tokenizer(input_stream)
         tokens = [Token(Lexem.STATEMENT_OPEN), Token(Lexem.MACRO)]
         self.assertTokenListEqual(tokens, list(tokenizer.get_tokens()))
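Every assertion above depends on Token comparing by value and on Lexem exposing one member per lexeme kind. A minimal sketch of what those definitions could look like, assuming a frozen dataclass with an optional value payload (the project's real definitions may differ):

 import enum
 from dataclasses import dataclass
 from typing import Any


 class Lexem(enum.Enum):
     # Only a few of the members used above; the real enum would also
     # need IF, ENDIF, SET, MACRO, the operator lexemes, and so on.
     HTML = enum.auto()
     PRINT_OPEN = enum.auto()
     PRINT_CLOSE = enum.auto()
     STATEMENT_OPEN = enum.auto()
     STATEMENT_CLOSE = enum.auto()
     IDENTIFIER = enum.auto()
     STRING = enum.auto()
     INT = enum.auto()


 @dataclass(frozen=True)
 class Token:
     lexem: Lexem
     value: Any = None  # e.g. 'identifier' for IDENTIFIER, 2 for INT

With frozen=True the dataclass generates the value-based __eq__ that assertEqual and assertTokenListEqual rely on.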