def test_identifier_in_statement_dollar(self):
     identifier = "_testId$="
     expected = [
         Token('_testId', const.ID, const.ID, 1, '$'),
         Token('=', const.OPERATOR, const.EQUALS, 1, None)
     ]
     result = Lexer().lex(identifier)
     self.assertEqual(expected, result['Tokens'])
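The trailing $ in _testId$ is a BrightScript type declaration character (string); the test expects the lexer to strip it from the token value and carry it as the Token's fifth field, which is None for tokens without a suffix, as the = token shows. A minimal sketch of inspecting that result outside a test class, using only what the snippet above already shows:
 # Minimal sketch, not from the bslint test suite: lex the same input and
 # print each token with its line number (Token exposes line_number,
 # as used elsewhere in these examples).
 result = Lexer().lex("_testId$=")
 for token in result['Tokens']:
     print(token, token.line_number)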
Example #2
 def test_statement_ends_with_colon(self):
     lexer = Lexer()
     identifier = ":"
     regex_match = regex_handler.find_match(identifier)
     styling_handler = StylingHandler(lexer, identifier)
     styling_handler.apply_styling(regex_match)
     styling_handler.check_end_of_statement()
     self.assertTrue(styling_handler.end_of_statement)
 def test_identifier_in_statement_with_space(self):
     identifier = "_testId ="
     expected = [
         Token('_testId', const.ID, const.ID, 1),
         Token('=', const.OPERATOR, const.EQUALS, 1)
     ]
     result = Lexer().lex(identifier)
     self.assertEqual(expected, result["Tokens"])
Example #4
 def test_integer_with_trailing_points(self):
     identifier = "123."
     expected = [
         Token('123', const.NUMERIC, const.VALUE, 1),
         Token('.', const.SPECIAL_OPERATOR, const.DOT, 1)
     ]
     result = Lexer().lex(identifier)
     self.assertEqual(expected, result[self.TOKENS])
Example #5
 def __init__(self):
     Lexer.__init__(self)
     self.expected_statement = None
     self.all_statements = []
     self.current_tokens = None
     self.program = []
     self.line_reductions = []
     self.current_output_list = self.all_statements
     self.current_grammar = statement_grammar
     self.number_of_priorities = None
     self.current_priority_level = 0
     self.statement_already_recalled = False
     self.program_already_recalled = False
     self.opening_stmt_indexes = [0]
     self.token_index = 0
     self.current_statement = []
     self.closing_stmt_index = 0
     self.current_index = 0
Example #6
 def test_multiple_decimal_numbers(self):
     identifier = "123.123.123"
     expected = [
         Token('123.123', const.NUMERIC, const.VALUE, 1),
         Token('.', const.SPECIAL_OPERATOR, const.DOT, 1),
         Token('123', const.NUMERIC, const.VALUE, 1)
     ]
     result = Lexer().lex(identifier)
     self.assertEqual(expected, result[self.TOKENS])
 def test_advanced_indentation_success(self):
     bslint.load_config_file(user_filepath=self.indentation_config_path,
                             default_filepath=TEST_CONFIG_FILE_PATH)
     advanced_indentation_file_path = os.path.join(
         STYLING_TEST_FILES_PATH, 'advanced-indentation.txt')
     file_name = advanced_indentation_file_path
     file = open(file_name, "r+").read()
     expected = []
     result = Lexer().lex(file)
     self.assertEqual(expected, result[const.WARNINGS])
 def test_error_handled_on_last_line_without_return(self):
     error_file_path = os.path.join(TESTS_RESOURCES_PATH,
                                    'error_handling_files/error-file.brs')
     chars = open(error_file_path, "r+").read()
     result = Lexer().lex(chars)
     expected = [
         msg_handler.get_error_msg(err_const.UNMATCHED_QUOTATION_MARK,
                                   ['"error file', 1])
     ]
     self.assertEqual(expected, result["Tokens"])
Example #9
 def parse(self, characters):
     self.set_number_of_priorities_level()
     try:
         lexing_result = Lexer.lex(self, characters)
         if lexing_result[const.STATUS] == const.SUCCESS:
             self.check_statement_validity(self.tokens[self.current_token_index:])
             self.set_check_program_values()
             self.check_program_validity()
     except custom_exception.ParsingException as exception:
         self.handle_parsing_error(exception.args[0])
     return self.build_return_message()
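As lint_file further down shows, Parser().parse returns the same result dictionary shape as Lexer().lex, keyed by const.STATUS, const.TOKENS and const.WARNINGS. A minimal usage sketch based only on how that result is consumed there; the BrightScript input is illustrative, not taken from the bslint fixtures:
 # Minimal usage sketch; assumes only the result keys that lint_file reads.
 # The BrightScript snippet is illustrative, not a bslint fixture.
 result = Parser().parse('sub main()\n    print "hello"\nend sub\n')
 if result[const.STATUS] == const.ERROR:
     print(result[const.TOKENS])      # on error, messages come back under TOKENS
 elif result[const.WARNINGS]:
     print(result[const.WARNINGS])    # otherwise any style warnings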
Example #10
 def test_lex_whole_file_with_multiple_errors(self):
     skeleton_main_with_errors_file_path = os.path.join(
         LEXING_TEST_FILES_PATH, 'skeleton-main-with-errors.brs')
     chars = open(skeleton_main_with_errors_file_path, "r+").read()
     result = Lexer().lex(chars)
     expected = [
         msg_handler.get_error_msg(err_const.UNMATCHED_QUOTATION_MARK,
                                   ['"roSGScreen)', 2])
     ]
     self.assertEqual(expected, result[const.TOKENS])
     self.assertEqual(const.ERROR, result[const.STATUS])
 def test_indent_with_only_tabs_with_error(self):
     tab_only_indentation_config_path = os.path.join(
         TESTS_CONFIG_PATH, 'indentation/tab-only-indentation.json')
     bslint.load_config_file(user_filepath=tab_only_indentation_config_path,
                             default_filepath=TEST_CONFIG_FILE_PATH)
     indent_with_tabs_only_file_path = os.path.join(
         STYLING_TEST_FILES_PATH, 'indent-with-tabs-only.txt')
     file_name = indent_with_tabs_only_file_path
     file = open(file_name, "r+").read()
     expected = [error.get_error_msg(err_const.TAB_AND_SPACES, [10])]
     result = Lexer().lex(file)
     self.assertEqual(expected, result[const.WARNINGS])
 def test_indentation_error(self):
     bslint.load_config_file(user_filepath=self.indentation_config_path,
                             default_filepath=TEST_CONFIG_FILE_PATH)
     basic_indentation_file_path = os.path.join(STYLING_TEST_FILES_PATH,
                                                'basic-indentation.txt')
     file_name = basic_indentation_file_path
     file = open(file_name, "r+").read()
     expected = [
         error.get_error_msg(err_const.TAB_INDENTATION_ERROR, [4, 2])
     ]
     result = Lexer().lex(file)
     self.assertEqual(expected, result[const.WARNINGS])
 def test_random(self):
     identifier = 'print "[";_logLevel;"]   ";isoDateTime;"   ";message'
     expected = [
         Token('print', const.PRINT_KEYWORD, const.PRINT_KEYWORD, 1, None),
         Token('[', const.STRING, const.VALUE, 1, None),
         Token(';', const.SEMI_COLON, const.SEMI_COLON, 1, None),
         Token('_logLevel', const.ID, const.ID, 1, None),
         Token(';', const.SEMI_COLON, const.SEMI_COLON, 1, None),
         Token(']   ', const.STRING, const.VALUE, 1, None),
         Token(';', const.SEMI_COLON, const.SEMI_COLON, 1, None),
         Token('isoDateTime', const.ID, const.ID, 1, None),
         Token(';', const.SEMI_COLON, const.SEMI_COLON, 1, None),
         Token('   ', const.STRING, const.VALUE, 1, None),
         Token(';', const.SEMI_COLON, const.SEMI_COLON, 1, None),
         Token('message', const.ID, const.ID, 1, None)
     ]
     result = Lexer().lex(identifier)
     self.assertEqual(expected, result['Tokens'])
 def lint_file(self, filepath):
     filename = filepath.replace(os.getcwd() + '/', '')
     if filename.endswith(".brs") or filename.endswith(".bs"):
         self.files.append(filename)
         file_content = self.file_reader(filename)['file_content']
         if self.args.lex:
             lex_result = Lexer().lex(file_content)
         else:
             lex_result = Parser().parse(file_content)
         if lex_result[const.STATUS] == const.ERROR:
             self.handle_lexing_result(filepath, const.ERRORS,
                                       lex_result[const.TOKENS])
         elif lex_result[const.WARNINGS]:
             self.handle_lexing_result(filepath, const.WARNINGS,
                                       lex_result[const.WARNINGS])
     if filepath in self.messages[const.WARNINGS]:
         PROCESS_LOCK.acquire()
         self.print_issues(filepath, const.WARNINGS)
         self.print_file_summary(filepath)
         PROCESS_LOCK.release()
Example #15
 def lex_warnings_match(self, expected, characters):
     result = Lexer().lex(characters)
     self.assertEqual(expected, result[const.WARNINGS])
 def test_statement_with_2curly_braces(self):
     identifier = "myVar = {\n{\n}"
     lexer = Lexer()
     lexer.lex(identifier)
     self.assertFalse(lexer.handle_style.end_of_statement)
 def test_statement_ends_with_curly_brace(self):
     identifier = "myVar = {\n"
     lexer = Lexer()
     lexer.lex(identifier)
     self.assertFalse(lexer.handle_style.end_of_statement)
Example #18
 def test_lex_whole_file(self):
     skeleton_main_file_path = os.path.join(LEXING_TEST_FILES_PATH,
                                            'skeleton-main.brs')
     chars = open(skeleton_main_file_path, "r+").read()
     result = Lexer().lex(chars)
     self.assertEqual(const.SUCCESS, result[const.STATUS])
Example #19
 def test_statement_ends_with_colon_no_change_line(self):
     identifier = "myVar = value: otherVar = otherValue"
     result = Lexer().lex(identifier)
     self.assertEqual(1, result["Tokens"].pop().line_number)
Example #20
 def test_white_space(self):
     identifier = " "
     expected_result = []
     result = Lexer().lex(identifier)
     self.assertEqual(expected_result, result[self.TOKENS])
Example #21
 def lex_string(self, expected, string):
     result = Lexer().lex(string)
     self.assertEqual(expected, result[const.WARNINGS])
     self.assertEqual(const.SUCCESS, result[const.STATUS])
Example #22
 def lex_identifier(self, expected, identifier):
     lexer = Lexer()
     lexer.lex(identifier)
     self.assertEqual(expected, lexer.statements_counter)
Example #23
 def test_single_quote_comment(self):
     identifier = "' do stuff \n"
     expected = []
     result = Lexer().lex(identifier)
     self.assertEqual(expected, result[self.TOKENS])
Example #24
 def lex_file(self, expected, file):
     file = open(file, "r+").read()
     result = Lexer().lex(file)
     self.assertEqual(expected, result[const.WARNINGS])
     self.assertEqual(const.SUCCESS, result[const.STATUS])
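The file-based helpers above read fixtures with open(file, "r+").read() and leave the handle to be closed by garbage collection; an equivalent sketch of lex_file using a context manager, with the same assertions:
 def lex_file(self, expected, file):
     # Behaviour unchanged from the helper above; the handle is closed
     # deterministically by the with block.
     with open(file, "r+") as handle:
         result = Lexer().lex(handle.read())
     self.assertEqual(expected, result[const.WARNINGS])
     self.assertEqual(const.SUCCESS, result[const.STATUS])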