Esempio n. 1
0
 def testAppendToken(self):
     """Check that Lexer.appendToken stores the given token in sub_tokens."""
     LexerTest.logger.debug('Test appending a token.')

     # Fixtures: a lexer owning a main token, plus a second token to append.
     token = Token('str', Token.ALWAYS_DATA, True, 'Main Token')
     sub_token = Token('str', Token.ALWAYS_DATA, True, 'Sub Token')
     lexer = Lexer(None, 'msg', [], 'Unit Test Lexer')
     lexer.token = token

     # Dump the fixtures so a failing run can be diagnosed from the log.
     LexerTest.logger.debug('Created main token:\n%s', token.to_pretty_json())
     LexerTest.logger.debug('Created sub token:\n%s', sub_token.to_pretty_json())
     LexerTest.logger.debug('Created lexer:\n%s', lexer.to_pretty_json())

     # Exercise the method under test.
     lexer.appendToken(sub_token)
     assert lexer.sub_tokens, 'No tokens found in list.'
     added_token = lexer.sub_tokens[-1]

     # Show what actually got appended.
     LexerTest.logger.debug('Token added to lexer:\n%s', added_token.to_pretty_json())

     # findErrors reports any inconsistent lexer state.
     self.findErrors(lexer)

     LexerTest.logger.debug('Test succeeded!')
Esempio n. 2
0
    def createSourceLexers(self):
        """Build the lexer chain and start state for the "source" field.

        Returns:
            A ``((source_lexer, comp_lexer, comp_id_lexer), source_state)``
            tuple: the lexers (outermost first) and the state lexing starts in.
        """
        # Terminal state reached at end-of-stream.
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Component id lexer: consumes until end-of-stream.
        comp_id_to_end_transition = LexerStateTransition(
            'EOS', -1, 0, True, end_state, 'Component ID to End Transition')
        comp_id_token = Token('component_id', Token.SOMETIMES_DATA, True,
                              'Component ID Token')
        comp_id_state = LexerState([comp_id_to_end_transition], comp_id_token,
                                   False, 'Component ID State')
        comp_id_lexer = Lexer(None, 'component_id', [], 'Component ID Lexer')

        # Component lexer: an opening '[' hands off to the component id
        # state; end-of-stream goes straight to the end state.
        comp_to_end_transition = LexerStateTransition(
            'EOS', 0, 0, True, end_state, 'Component to End Transition')
        # Raw string: '\[' is an invalid escape sequence in a plain literal
        # (SyntaxWarning since Python 3.12); the regex value is unchanged.
        comp_id_transition = LexerStateTransition(
            r'\[', 0, 1, True, comp_id_state,
            'Component to Component ID Transition')
        comp_token = Token('component', Token.ALWAYS_DATA, True,
                           'Component Token')
        comp_state = LexerState([comp_to_end_transition, comp_id_transition],
                                comp_token, False, 'Component State')
        comp_lexer = Lexer(None, 'component', [], 'Component Lexer')

        # Source lexer: any first character drops into the component state.
        comp_transition = LexerStateTransition(
            '.', 0, -1, False, comp_state, 'Source to Component Transition')
        source_token = Token('source', Token.NEVER_DATA, False, 'Source Token')
        source_state = LexerState([comp_transition], source_token, False,
                                  'Source State')
        source_lexer = Lexer(None, 'source', [], 'Source Lexer')

        return ((source_lexer, comp_lexer, comp_id_lexer), source_state)
Esempio n. 3
0
    def createEventLexer(self):
        """Assemble the top-level event lexer: datetime -> source -> msg.

        Returns:
            The fully wired event ``Lexer``; its start state dispatches into
            the datetime chain on the first character of input.
        """
        # Terminal state reached at end-of-stream.
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Msg lexer: the final field, consumes until end-of-stream.
        end_transition = LexerStateTransition('EOS', 0, 0, True, end_state,
                                              'Msg to End Transition')
        msg_token = Token('msg', Token.SOMETIMES_DATA, False,
                          'Outer Msg Token')
        msg_state = LexerState([end_transition], msg_token, False,
                               'Outer Msg State')
        msg_sub_lexers, msg_start_state = self.createMsgLexers()
        msg_lexer = Lexer(msg_start_state, 'msg', msg_sub_lexers,
                          'Outer Msg Lexer')

        # Source lexer: a ':' ends the source field and starts the msg.
        msg_transition = LexerStateTransition(':', 0, 1, True, msg_state,
                                              'Source to Msg Transition')
        source_token = Token('source', Token.ALWAYS_DATA, False,
                             'Outer Source Token')
        source_state = LexerState([msg_transition], source_token, True,
                                  'Outer Source State')
        source_sub_lexers, source_start_state = self.createSourceLexers()
        source_lexer = Lexer(source_start_state, 'source', source_sub_lexers,
                             'Outer Source Lexer')

        # Datetime lexer: an HH:MM:SS timestamp ends the datetime field.
        # Raw string: '\d' is an invalid escape sequence in a plain literal
        # (SyntaxWarning since Python 3.12); the regex value is unchanged.
        source_transition = LexerStateTransition(
            r'\d{2}:\d{2}:\d{2}', 8, 8, True, source_state,
            'Datetime to Source Transition')
        datetime_token = Token('datetime', Token.ALWAYS_DATA, False,
                               'Outer Datetime Token')
        datetime_state = LexerState([source_transition], datetime_token, False,
                                    'Outer Datetime State')
        datetime_sub_lexers, datetime_start_state = self.createDatetimeLexers()
        datetime_lexer = Lexer(datetime_start_state, 'datetime',
                               datetime_sub_lexers, 'Outer Datetime Lexer')

        # Event lexer: any first character drops into the datetime state.
        datetime_transition = LexerStateTransition(
            '.', 0, -1, True, datetime_state, 'Event to Datetime Transition')
        event_token = Token('event', Token.NEVER_DATA, False, 'Event Token')
        event_state = LexerState([datetime_transition], event_token, False,
                                 'Event State')
        event_lexer = Lexer(event_state, 'event',
                            [datetime_lexer, source_lexer, msg_lexer],
                            'Event Lexer')

        return event_lexer
Esempio n. 4
0
    def createLexer(self):
        """Build a small msg -> month -> time lexer chain for unit tests.

        Returns:
            ``[lexers, tokens, states]`` lists, each ordered outermost first
            (msg, month, time; ``states`` also includes the end state).
        """
        # Test data stored in the msg token.
        source = 'Jul 11 09:51:54'

        # Terminal state reached at end-of-stream.
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Time lexer: consumes until end-of-stream.
        end_transition = LexerStateTransition('EOS', 0, 0, True, end_state,
                                              'Time to End Transition')
        time_token = Token('time', Token.ALWAYS_DATA, True, 'Time Token')
        time_state = LexerState([end_transition], time_token, False,
                                'Time State')
        time_lexer = Lexer(time_state, 'time', [], 'Time Lexer')

        # Month lexer: a whitespace-padded two-digit day ends the month.
        # Raw string: '\s'/'\d' are invalid escapes in a plain literal
        # (SyntaxWarning since Python 3.12); the regex value is unchanged.
        time_transition = LexerStateTransition(r'\s+\d{2}\s+', 0, 4, True,
                                               time_state,
                                               'Month to Time Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([time_transition], month_token, False,
                                 'Month State')
        month_lexer = Lexer(month_state, 'month', [], 'Month Lexer')

        # Msg state: any first character drops into the month state.
        month_transition = LexerStateTransition('.', 0, -1, True, month_state,
                                                'Msg to Month Transition')
        msg_token = Token('msg', Token.NEVER_DATA, False, source)
        msg_state = LexerState([month_transition], msg_token, False,
                               'Msg State')

        # Msg lexer: primed with its state, token, and token stream.
        msg_lexer = Lexer(msg_state, 'msg', [time_lexer, month_lexer],
                          'Msg Lexer')
        msg_lexer.changeState(msg_state)
        msg_lexer.token = msg_token
        msg_lexer.stream = msg_token.getStream()

        # Show test data
        LexerTest.logger.debug('Created state:\n%s' %
                               (msg_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' %
                               (month_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' %
                               (time_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' %
                               (end_state.to_pretty_json()))

        tokens = [msg_token, month_token, time_token]
        states = [msg_state, month_state, time_state, end_state]
        lexers = [msg_lexer, month_lexer, time_lexer]

        return [lexers, tokens, states]
Esempio n. 5
0
    def createLexer(self):
        """Build a small msg -> month -> time lexer chain for unit tests.

        Returns:
            ``[lexers, tokens, states]`` lists, each ordered outermost first
            (msg, month, time; ``states`` also includes the end state).
        """
        # Test data stored in the msg token.
        source = 'Jul 11 09:51:54'

        # Terminal state reached at end-of-stream.
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Time lexer: consumes until end-of-stream.
        end_transition = LexerStateTransition('EOS', 0, 0, True, end_state, 'Time to End Transition')
        time_token = Token('time', Token.ALWAYS_DATA, True, 'Time Token')
        time_state = LexerState([end_transition], time_token, False, 'Time State')
        time_lexer = Lexer(time_state, 'time', [], 'Time Lexer')

        # Month lexer: a whitespace-padded two-digit day ends the month.
        # Raw string: '\s'/'\d' are invalid escapes in a plain literal
        # (SyntaxWarning since Python 3.12); the regex value is unchanged.
        time_transition = LexerStateTransition(r'\s+\d{2}\s+', 0, 4, True, time_state, 'Month to Time Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([time_transition], month_token, False, 'Month State')
        month_lexer = Lexer(month_state, 'month', [], 'Month Lexer')

        # Msg state: any first character drops into the month state.
        month_transition = LexerStateTransition('.', 0, -1, True, month_state, 'Msg to Month Transition')
        msg_token = Token('msg', Token.NEVER_DATA, False, source)
        msg_state = LexerState([month_transition], msg_token, False, 'Msg State')

        # Msg lexer: primed with its state, token, and token stream.
        msg_lexer = Lexer(msg_state, 'msg', [time_lexer, month_lexer], 'Msg Lexer')
        msg_lexer.changeState(msg_state)
        msg_lexer.token = msg_token
        msg_lexer.stream = msg_token.getStream()

        # Show test data
        LexerTest.logger.debug('Created state:\n%s' % (msg_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' % (month_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' % (time_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' % (end_state.to_pretty_json()))

        tokens = [msg_token, month_token, time_token]
        states = [msg_state, month_state, time_state, end_state]
        lexers = [msg_lexer, month_lexer, time_lexer]

        return [lexers, tokens, states]
Esempio n. 6
0
    def constructLexer(cls, lexer_json):
        """Instantiate a Lexer from its JSON (dict) representation.

        Returns:
            A ``(lexer, root)`` tuple, where ``root`` is the decoded
            boolean 'root' flag from the JSON.
        """
        # Decode the root flag, then build the lexer straight from the
        # remaining JSON fields.
        is_root = cls.getBoolean(lexer_json['root'])
        lexer = Lexer(lexer_json['start_state'],
                      lexer_json['lexer_type'],
                      lexer_json['sub_lexers'],
                      lexer_json['name'])

        return (lexer, is_root)
Esempio n. 7
0
    def testAppendToken(self):
        """Exercise Lexer.appendToken and verify the token lands in sub_tokens."""
        LexerTest.logger.debug('Test appending a token.')

        # Arrange: a lexer with a main token, plus a token to append.
        main_token = Token('str', Token.ALWAYS_DATA, True, 'Main Token')
        sub_token = Token('str', Token.ALWAYS_DATA, True, 'Sub Token')
        lexer = Lexer(None, 'msg', [], 'Unit Test Lexer')
        lexer.token = main_token

        # Record the fixtures for diagnosing failed runs.
        LexerTest.logger.debug('Created main token:\n%s',
                               main_token.to_pretty_json())
        LexerTest.logger.debug('Created sub token:\n%s',
                               sub_token.to_pretty_json())
        LexerTest.logger.debug('Created lexer:\n%s', lexer.to_pretty_json())

        # Act.
        lexer.appendToken(sub_token)
        assert len(lexer.sub_tokens) > 0, 'No tokens found in list.'
        appended = lexer.sub_tokens[-1]

        # Show what actually got appended.
        LexerTest.logger.debug('Token added to lexer:\n%s',
                               appended.to_pretty_json())

        # findErrors reports any inconsistent lexer state.
        self.findErrors(lexer)

        LexerTest.logger.debug('Test succeeded!')
Esempio n. 8
0
    def createMsgLexers(self):
        """Build the lexer chain and start state for the "msg" field.

        Returns:
            A ``((msg_lexer, level_lexer, sub_msg_lexer), msg_state)`` tuple:
            the lexers (outermost first) and the state lexing starts in.
        """
        # Terminal state reached at end-of-stream.
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Sub msg lexer: consumes until end-of-stream.
        sub_msg_end_transition = LexerStateTransition(
            'EOS', 0, 1, True, end_state, 'Sub Msg to End Transition')
        sub_msg_token = Token('sub_msg', Token.SOMETIMES_DATA, True,
                              'Sub Msg Token')
        sub_msg_state = LexerState([sub_msg_end_transition], sub_msg_token,
                                   False, 'Sub Msg State')
        sub_msg_lexer = Lexer(None, 'sub_msg', [], 'Sub Msg Lexer')

        # Level lexer: a closing '>' hands off to the sub msg state.
        level_to_sub_msg_transition = LexerStateTransition(
            '>', 0, 1, True, sub_msg_state, 'Level to Sub Msg Transition')
        level_token = Token('level', Token.SOMETIMES_DATA, True, 'Level Token')
        level_state = LexerState([level_to_sub_msg_transition], level_token,
                                 False, 'Level State')
        level_lexer = Lexer(None, 'level', [], 'Level Lexer')

        # Msg lexer: '<' starts a level, any other character starts a
        # sub msg, and end-of-stream finishes immediately.
        level_transition = LexerStateTransition(
            '[<]', -1, 1, False, level_state, 'Msg to Level Transition')
        sub_msg_transition = LexerStateTransition(
            '[^<]', -1, 0, False, sub_msg_state, 'Msg to Sub Msg Transition')
        msg_end_transition = LexerStateTransition(
            'EOS', 0, 1, True, end_state, 'Msg to End Transition')
        msg_token = Token('msg', Token.NEVER_DATA, False, 'Msg Token')
        msg_state = LexerState(
            [level_transition, sub_msg_transition, msg_end_transition],
            msg_token, True, 'Msg State')
        msg_lexer = Lexer(None, 'msg', [], 'Msg Lexer')

        return ((msg_lexer, level_lexer, sub_msg_lexer), msg_state)
Esempio n. 9
0
    def createDatetimeLexers(self):
        """Build the lexer chain and start state for the "datetime" field.

        Returns:
            A ``((datetime_lexer, month_lexer, day_lexer, hour_lexer,
            minute_lexer, second_lexer), datetime_state)`` tuple: the lexers
            (outermost first) and the state lexing starts in.
        """
        # Terminal state reached at end-of-stream.
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Second lexer: consumes until end-of-stream.
        end_transition = LexerStateTransition('EOS', 0, 0, False, end_state,
                                              'Second to End Transition')
        second_token = Token('second', Token.ALWAYS_DATA, True, 'Second Token')
        second_state = LexerState([end_transition], second_token, False,
                                  'Second State')
        second_lexer = Lexer(None, 'second', [], 'Second Lexer')

        # Minute lexer: a ':' hands off to the second state.
        second_transition = LexerStateTransition(
            ':', 0, 1, False, second_state, 'Minute to Second Transition')
        minute_token = Token('minute', Token.ALWAYS_DATA, True, 'Minute Token')
        minute_state = LexerState([second_transition], minute_token, False,
                                  'Minute State')
        minute_lexer = Lexer(None, 'minute', [], 'Minute Lexer')

        # Hour lexer: a ':' hands off to the minute state.
        minute_transition = LexerStateTransition(':', 0, 1, False,
                                                 minute_state,
                                                 'Hour to Minute Transition')
        hour_token = Token('hour', Token.ALWAYS_DATA, True, 'Hour Token')
        hour_state = LexerState([minute_transition], hour_token, False,
                                'Hour State')
        hour_lexer = Lexer(None, 'hour', [], 'Hour Lexer')

        # Day lexer: whitespace ends the day field.
        # Raw strings below: '\s' is an invalid escape sequence in a plain
        # literal (SyntaxWarning since Python 3.12); regex values unchanged.
        hour_transition = LexerStateTransition(r'\s+', 0, 1, True, hour_state,
                                               'Day to Hour Transition')
        day_token = Token('day', Token.ALWAYS_DATA, True, 'Day Token')
        day_state = LexerState([hour_transition], day_token, False,
                               'Day State')
        day_lexer = Lexer(None, 'day', [], 'Day Lexer')

        # Month lexer: whitespace ends the month field.
        day_transition = LexerStateTransition(r'\s+', 0, 1, True, day_state,
                                              'Month to Day Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([day_transition], month_token, False,
                                 'Month State')
        month_lexer = Lexer(None, 'month', [], 'Month Lexer')

        # Datetime lexer: any first character drops into the month state.
        month_transition = LexerStateTransition(
            '.', 0, -1, True, month_state, 'Datetime to Month Transition')
        datetime_token = Token('datetime', Token.NEVER_DATA, False,
                               'Datetime Token')
        datetime_state = LexerState([month_transition], datetime_token, False,
                                    'Datetime State')
        datetime_lexer = Lexer(None, 'datetime', [], 'Datetime Lexer')

        return ((datetime_lexer, month_lexer, day_lexer, hour_lexer,
                 minute_lexer, second_lexer), datetime_state)