Example no. 1
0
 def test_rem(self):
     """A bare "REM" line is recognized as a comment by lexer and parser."""
     self.common.match_regex("REM", 1, const.COMMENT, const.COMMENT)
     source_line = "REM\n"
     outcome = regex_handler.find_match(source_line)
     self.assertEqual("REM", outcome["match"].group())
     self.assertEqual(const.COMMENT, outcome["token_lexer_type"])
     self.assertEqual(const.COMMENT, outcome["token_parser_type"])
Example no. 2
0
 def test_space_new_line(self):
     """A space before a newline matches a single untyped character."""
     outcome = regex_handler.find_match(" \n")
     matched_text = outcome["match"].group()
     self.assertEqual(" ", matched_text)
     self.assertEqual(1, len(matched_text))
     self.assertIsNone(outcome["token_lexer_type"])
     self.assertIsNone(outcome["token_parser_type"])
Example no. 3
0
 def match_regex(self, identifier, match_group, lexer_type, parser_type):
     """Assert *identifier* matches in *match_group* (whole match when
     None) with the expected lexer and parser token types."""
     outcome = regex_handler.find_match(identifier)
     group_args = () if match_group is None else (match_group,)
     self.assertEqual(identifier, outcome[const.MATCH].group(*group_args))
     self.assertEqual(lexer_type, outcome[const.TOKEN_LEXER_TYPE])
     self.assertEqual(parser_type, outcome[const.TOKEN_PARSER_TYPE])
Example no. 4
0
 def test_statement_ends_with_colon(self):
     """A colon marks the end of a statement once styling is applied."""
     colon = ":"
     styler = StylingHandler(Lexer(), colon)
     styler.apply_styling(regex_handler.find_match(colon))
     styler.check_end_of_statement()
     self.assertTrue(styler.end_of_statement)
Example no. 5
0
 def test_identifier_in_statement_hashtag(self):
     """"_testId#" splits into the name part and the "#" type suffix."""
     outcome = regex_handler.find_match("_testId#")
     matched = outcome["match"]
     self.assertEqual("_testId", matched.group(self.VALUE_GROUP))
     self.assertEqual("#", matched.group(self.TYPE_GROUP))
     self.assertEqual(const.ID, outcome["token_lexer_type"])
Example no. 6
0
 def create_token_and_handle_styling(self):
     """Match the next token at the current scan position, apply styling,
     append the resulting token, and — when a statement ends — validate
     the tokens collected since the previous statement boundary.
     """
     regex_match = regex_handler.find_match(
         self.characters[self.handle_style.current_char_index:])
     # Advance both the styled line length and the scan position by the
     # length of the matched text.
     self.handle_style.line_length += len(regex_match[MATCH].group())
     self.handle_style.current_char_index += len(regex_match[MATCH].group())
     if regex_match["token_lexer_type"] is not None:
         applied_common_styling = self.handle_style.apply_styling(
             regex_match)
         # Only build a token when the styling pass accepted the match.
         if applied_common_styling:
             token = self.handle_match.match_handler(regex_match)
             if token is not None:
                 token.line_number = self.handle_style.line_number
                 self.tokens.append(token)
         self.handle_style.check_end_of_statement()
         if self.handle_style.end_of_statement:
             # Validate only the tokens belonging to the statement that
             # just ended, then reset the boundary marker and remember
             # where the next statement's tokens will start.
             self.check_statement_validity(
                 self.tokens[self.current_token_index:])
             self.handle_style.end_of_statement = False
             self.current_token_index = len(self.tokens)
Example no. 7
0
 def test_skip_rem_line_with_text(self):
     """A "rem" line with trailing text is still tokenized as a comment."""
     line = "rem randomText BSLINT_skip_line \n"
     outcome = regex_handler.find_match(line)
     self.assertEqual(const.COMMENT, outcome["token_lexer_type"])
     self.assertEqual(const.COMMENT, outcome["token_parser_type"])
Example no. 8
0
 def _match(self, token_type, match, identifier):
     """Assert *identifier* yields *match* in the command group and is
     typed as *token_type* for both lexer and parser."""
     outcome = regex_handler.find_match(identifier)
     self.assertEqual(match, outcome["match"].group(self.COMMAND_GROUP))
     self.assertEqual(token_type, outcome["token_lexer_type"])
     self.assertEqual(token_type, outcome["token_parser_type"])
Example no. 9
0
 def match(self, identifier, match_group):
     """Assert *identifier* is matched whole in *match_group* and is
     typed as an identifier by both lexer and parser."""
     outcome = regex_handler.find_match(identifier)
     self.assertEqual(identifier, outcome[const.MATCH].group(match_group))
     self.assertEqual(const.ID, outcome[const.TOKEN_LEXER_TYPE])
     self.assertEqual(const.ID, outcome[const.TOKEN_PARSER_TYPE])