Example #1
    def createSourceLexers(self):
        # End state
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Component id lexer
        comp_id_to_end_transition = LexerStateTransition(
            'EOS', -1, 0, True, end_state, 'Component ID to End Transition')
        comp_id_token = Token('component_id', Token.SOMETIMES_DATA, True,
                              'Component ID Token')
        comp_id_state = LexerState([comp_id_to_end_transition], comp_id_token,
                                   False, 'Component ID State')
        comp_id_lexer = Lexer(None, 'component_id', [], 'Component ID Lexer')

        # Component lexer
        comp_to_end_transition = LexerStateTransition(
            'EOS', 0, 0, True, end_state, 'Component to End Transition')
        comp_id_transition = LexerStateTransition(
            r'\[', 0, 1, True, comp_id_state,
            'Component to Component ID Transition')
        comp_token = Token('component', Token.ALWAYS_DATA, True,
                           'Component Token')
        comp_state = LexerState([comp_to_end_transition, comp_id_transition],
                                comp_token, False, 'Component State')
        comp_lexer = Lexer(None, 'component', [], 'Component Lexer')

        # Source lexer
        comp_transition = LexerStateTransition(
            '.', 0, -1, False, comp_state, 'Source to Component Transition')
        source_token = Token('source', Token.NEVER_DATA, False, 'Source Token')
        source_state = LexerState([comp_transition], source_token, False,
                                  'Source State')
        source_lexer = Lexer(None, 'source', [], 'Source Lexer')

        return ((source_lexer, comp_lexer, comp_id_lexer), source_state)
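The tuple returned above (sub-lexers plus their start state) is meant to be handed to an outer Lexer, as createEventLexer does later in this listing. A minimal wiring sketch follows; the helper name is hypothetical and the Lexer constructor is the one used throughout these examples.

    def buildSourceLexer(self):
        # Hypothetical helper; mirrors the wiring used in createEventLexer().
        source_sub_lexers, source_start_state = self.createSourceLexers()
        return Lexer(source_start_state, 'source', source_sub_lexers,
                     'Outer Source Lexer')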
Example #2
    def testAppendToken(self):
        LexerTest.logger.debug('Test appending a token.')

        # Test data
        token = Token('str', Token.ALWAYS_DATA, True, 'Main Token')
        sub_token = Token('str', Token.ALWAYS_DATA, True, 'Sub Token')
        lexer = Lexer(None, 'msg', [], 'Unit Test Lexer')
        lexer.token = token

        # Show test data
        LexerTest.logger.debug('Created main token:\n%s' %
                               (token.to_pretty_json()))
        LexerTest.logger.debug('Created sub token:\n%s' %
                               (sub_token.to_pretty_json()))
        LexerTest.logger.debug('Created lexer:\n%s' % (lexer.to_pretty_json()))

        # Run test
        lexer.appendToken(sub_token)
        assert len(lexer.sub_tokens) > 0, 'No tokens found in list.'
        added_token = lexer.sub_tokens[-1]

        # Show test output
        LexerTest.logger.debug('Token added to lexer:\n%s' %
                               (added_token.to_pretty_json()))

        # Verify results
        self.findErrors(lexer)

        LexerTest.logger.debug('Test succeeded!')
Example #3
    def createTokens(self, test_data):
        # Data structure
        tokens = []

        # Month token
        month_token = Token('month', Token.ALWAYS_DATA, True,
                            test_data['month'])
        tokens.append(month_token)

        # Day token
        day_token = Token('day', Token.ALWAYS_DATA, True, test_data['day'])
        tokens.append(day_token)

        # Hour token
        hour_token = Token('hour', Token.ALWAYS_DATA, True, test_data['hour'])
        tokens.append(hour_token)

        # Minute token
        minute_token = Token('minute', Token.ALWAYS_DATA, True,
                             test_data['minute'])
        tokens.append(minute_token)

        # Second token
        second_token = Token('second', Token.ALWAYS_DATA, True,
                             test_data['second'])
        tokens.append(second_token)

        return tokens
Example #4
    def lexEvent(self, event):
        token = Token('event', Token.ALWAYS_DATA, False, event)
        JsonImporterTest.logger.debug('Created token:\n%s' %
                                      (token.to_pretty_json()))

        self.lexer.start(token)
        tokens = self.lexer.getAllTokens()

        JsonImporterTest.logger.debug('Found %d tokens.' % (len(tokens)))
        assert len(tokens) > 0, 'No tokens found in list.'

        for i, sub_token in enumerate(tokens):
            JsonImporterTest.logger.debug('Found token %d:\n%s' %
                                          (i, sub_token.to_pretty_json()))
Example #5
    def lexEvent(self, event):
        token = Token('event', Token.ALWAYS_DATA, False, event)
        LexerParserTest.logger.debug('Created token:\n%s' %
                                     (token.to_pretty_json()))

        self.lexer.start(token)
        tokens = self.lexer.getAllTokens()

        LexerParserTest.logger.debug('Found %d tokens.' % (len(tokens)))
        assert len(tokens) > 0, 'No tokens found in list.'

        for i, sub_token in enumerate(tokens):
            LexerParserTest.logger.debug('Found token %d:\n%s' %
                                         (i, sub_token.to_pretty_json()))
Example #6
    def createEventLexer(self):
        # End state
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Msg lexer
        end_transition = LexerStateTransition('EOS', 0, 0, True, end_state,
                                              'Msg to End Transition')
        msg_token = Token('msg', Token.SOMETIMES_DATA, False,
                          'Outer Msg Token')
        msg_state = LexerState([end_transition], msg_token, False,
                               'Outer Msg State')
        msg_sub_lexers, msg_start_state = self.createMsgLexers()
        msg_lexer = Lexer(msg_start_state, 'msg', msg_sub_lexers,
                          'Outer Msg Lexer')

        # Source lexer
        msg_transition = LexerStateTransition(':', 0, 1, True, msg_state,
                                              'Source to Msg Transition')
        source_token = Token('source', Token.ALWAYS_DATA, False,
                             'Outer Source Token')
        source_state = LexerState([msg_transition], source_token, True,
                                  'Outer Source State')
        source_sub_lexers, source_start_state = self.createSourceLexers()
        source_lexer = Lexer(source_start_state, 'source', source_sub_lexers,
                             'Outer Source Lexer')

        # Datetime lexer
        source_transition = LexerStateTransition(
            r'\d{2}:\d{2}:\d{2}', 8, 8, True, source_state,
            'Datetime to Source Transition')
        datetime_token = Token('datetime', Token.ALWAYS_DATA, False,
                               'Outer Datetime Token')
        datetime_state = LexerState([source_transition], datetime_token, False,
                                    'Outer Datetime State')
        datetime_sub_lexers, datetime_start_state = self.createDatetimeLexers()
        datetime_lexer = Lexer(datetime_start_state, 'datetime',
                               datetime_sub_lexers, 'Outer Datetime Lexer')

        # Event lexer
        datetime_transition = LexerStateTransition(
            '.', 0, -1, True, datetime_state, 'Event to Datetime Transition')
        event_token = Token('event', Token.NEVER_DATA, False, 'Event Token')
        event_state = LexerState([datetime_transition], event_token, False,
                                 'Event State')
        event_lexer = Lexer(event_state, 'event',
                            [datetime_lexer, source_lexer, msg_lexer],
                            'Event Lexer')

        return event_lexer
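A hedged usage sketch for the lexer built above, mirroring the lexEvent helpers shown elsewhere in this listing. The helper name is hypothetical, it assumes it lives on the same class as createEventLexer, and the sample log line is reconstructed from the test data used in the other examples.

    def lexSampleEvent(self):
        # Illustrative only; uses the Token/Lexer calls seen in this listing.
        event = ('Jul 11 09:51:54 ubuntu NetworkManager[887]: <info> '
                 "monitoring kernel firmware directory '/lib/firmware'.")
        lexer = self.createEventLexer()
        token = Token('event', Token.ALWAYS_DATA, False, event)
        lexer.start(token)
        return lexer.getAllTokens()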
Example #7
    def createTokens(self, test_data):
        # Data structure
        tokens = []

        # Msg 1 token
        msg_token_1 = Token('msg_1', Token.ALWAYS_DATA, True,
                            test_data['msg_1'])
        tokens.append(msg_token_1)

        # Msg 2 token
        msg_token_2 = Token('msg_2', Token.ALWAYS_DATA, True,
                            test_data['msg_2'])
        tokens.append(msg_token_2)

        return tokens
Example #8
    def testNextState(self):
        LexerStateTest.logger.debug('Testing the matching of a sequence.')

        # Test data
        source = 'Jul 11 09:51:54'
        stream = Stream(source, 'Unit Test Stream')
        token = Token('str', Token.ALWAYS_DATA, True, 'Token data')
        state_b = LexerState([], token, True, 'State B')
        pattern = r'\s+\d{2}\s+'
        start_offset = 0
        end_offset = 4
        transition = LexerStateTransition(pattern, start_offset, end_offset,
                                          True, state_b, 'A to B Transition')
        state_a = LexerState([transition], token, False, 'State A')

        # Show test data
        LexerStateTest.logger.debug('Created stream:\n%s' %
                                    (stream.to_pretty_json()))
        LexerStateTest.logger.debug('Created state A:\n%s' %
                                    (state_a.to_pretty_json()))
        LexerStateTest.logger.debug('Created state B:\n%s' %
                                    (state_b.to_pretty_json()))

        # Run test
        next_state = state_a
        new_token = None
        while next_state == state_a:
            current_element = stream.getNextElement()
            next_state, new_token = state_a.nextState(stream)

            # Show test output
            LexerStateTest.logger.debug('Fetching element: %s' %
                                        (current_element))

        # Show test output
        LexerStateTest.logger.debug('Token found:\n%s' %
                                    (new_token.to_pretty_json()))
        LexerStateTest.logger.debug('Final stream state:\n%s' %
                                    (stream.to_pretty_json()))
        LexerStateTest.logger.debug('Final lexer state:\n%s' %
                                    (next_state.to_pretty_json()))

        # Verify results
        token.data = source[:3]
        assert new_token == token, 'The token found is incorrect.'

        LexerStateTest.logger.debug('Test succeeded!')
Example #9
    def createTokens(self, test_data):
        # Data structure
        tokens = []

        # State token
        state_token = Token('state', Token.ALWAYS_DATA, True, test_data)
        tokens.append(state_token)

        return tokens
Example #10
    def createTokens(self, test_data):
        # Data structure
        tokens = []

        # Int token
        int_token = Token('int', Token.ALWAYS_DATA, True, test_data['int'])
        tokens.append(int_token)

        return tokens
Example #11
    def createDatetimeLexers(self):
        # End state
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Second lexer
        end_transition = LexerStateTransition('EOS', 0, 0, False, end_state,
                                              'Second to End Transition')
        second_token = Token('second', Token.ALWAYS_DATA, True, 'Second Token')
        second_state = LexerState([end_transition], second_token, False,
                                  'Second State')
        second_lexer = Lexer(None, 'second', [], 'Second Lexer')

        # Minute lexer
        second_transition = LexerStateTransition(
            ':', 0, 1, False, second_state, 'Minute to Second Transition')
        minute_token = Token('minute', Token.ALWAYS_DATA, True, 'Minute Token')
        minute_state = LexerState([second_transition], minute_token, False,
                                  'Minute State')
        minute_lexer = Lexer(None, 'minute', [], 'Minute Lexer')

        # Hour lexer
        minute_transition = LexerStateTransition(':', 0, 1, False,
                                                 minute_state,
                                                 'Hour to Minute Transition')
        hour_token = Token('hour', Token.ALWAYS_DATA, True, 'Hour Token')
        hour_state = LexerState([minute_transition], hour_token, False,
                                'Hour State')
        hour_lexer = Lexer(None, 'hour', [], 'Hour Lexer')

        # Day lexer
        hour_transition = LexerStateTransition(r'\s+', 0, 1, True, hour_state,
                                               'Day to Hour Transition')
        day_token = Token('day', Token.ALWAYS_DATA, True, 'Day Token')
        day_state = LexerState([hour_transition], day_token, False,
                               'Day State')
        day_lexer = Lexer(None, 'day', [], 'Day Lexer')

        # Month lexer
        day_transition = LexerStateTransition(r'\s+', 0, 1, True, day_state,
                                              'Month to Day Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([day_transition], month_token, False,
                                 'Month State')
        month_lexer = Lexer(None, 'month', [], 'Month Lexer')

        # Datetime lexer
        month_transition = LexerStateTransition(
            '.', 0, -1, True, month_state, 'Datetime to Month Transition')
        datetime_token = Token('datetime', Token.NEVER_DATA, False,
                               'Datetime Token')
        datetime_state = LexerState([month_transition], datetime_token, False,
                                    'Datetime State')
        datetime_lexer = Lexer(None, 'datetime', [], 'Datetime Lexer')

        return ((datetime_lexer, month_lexer, day_lexer, hour_lexer,
                 minute_lexer, second_lexer), datetime_state)
Example #12
    def createLexer(self):
        # Test data
        source = 'Jul 11 09:51:54'
        
        # End state
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')
        
        # Time lexer
        end_transition = LexerStateTransition('EOS', 0, 0, True, end_state, 'Time to End Transition')
        time_token = Token('time', Token.ALWAYS_DATA, True, 'Time Token')
        time_state = LexerState([end_transition], time_token, False, 'Time State')
        time_lexer = Lexer(time_state, 'time', [], 'Time Lexer')
        
        # Month lexer
        time_transition = LexerStateTransition(r'\s+\d{2}\s+', 0, 4, True, time_state, 'Month to Time Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([time_transition], month_token, False, 'Month State')
        month_lexer = Lexer(month_state, 'month', [], 'Month Lexer')
        
        # Msg state
        month_transition = LexerStateTransition('.', 0, -1, True, month_state, 'Msg to Month Transition')
        msg_token = Token('msg', Token.NEVER_DATA, False, source)
        msg_state = LexerState([month_transition], msg_token, False, 'Msg State')
        
        # Msg lexer
        msg_lexer = Lexer(msg_state, 'msg', [time_lexer, month_lexer], 'Msg Lexer')
        msg_lexer.changeState(msg_state)
        msg_lexer.token = msg_token
        msg_lexer.stream = msg_token.getStream()
        
        # Show test data
        LexerTest.logger.debug('Created state:\n%s' % (msg_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' % (month_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' % (time_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' % (end_state.to_pretty_json()))

        tokens = [msg_token, month_token, time_token]        
        states = [msg_state, month_state, time_state, end_state]
        lexers = [msg_lexer, month_lexer, time_lexer]
        
        return [lexers, tokens, states]
Example #13
    def testAppendToken(self):
        LexerTest.logger.debug('Test appending a token.')

        # Test data
        token = Token('str', Token.ALWAYS_DATA, True, 'Main Token')
        sub_token = Token('str', Token.ALWAYS_DATA, True, 'Sub Token')
        lexer = Lexer(None, 'msg', [], 'Unit Test Lexer')
        lexer.token = token

        # Show test data
        LexerTest.logger.debug('Created main token:\n%s' %
                               (token.to_pretty_json()))
        LexerTest.logger.debug('Created sub token:\n%s' %
                               (sub_token.to_pretty_json()))
        LexerTest.logger.debug('Created lexer:\n%s' % (lexer.to_pretty_json()))

        # Run test
        lexer.appendToken(sub_token)
        assert len(lexer.sub_tokens) > 0, 'No tokens found in list.'
        added_token = lexer.sub_tokens[-1]

        # Show test output
        LexerTest.logger.debug('Token added to lexer:\n%s' %
                               (added_token.to_pretty_json()))

        # Verify results
        self.findErrors(lexer)

        LexerTest.logger.debug('Test succeeded!')
Example #14
    def createMsgLexers(self):
        # End state
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Sub msg lexer
        end_transition = LexerStateTransition('EOS', 0, 1, True, end_state,
                                              'Sub Msg to End Transition')
        sub_msg_token = Token('sub_msg', Token.SOMETIMES_DATA, True,
                              'Sub Msg Token')
        sub_msg_state = LexerState([end_transition], sub_msg_token, False,
                                   'Sub Msg State')
        sub_msg_lexer = Lexer(None, 'sub_msg', [], 'Sub Msg Lexer')

        # Level lexer
        level_to_sub_msg_transition = LexerStateTransition(
            '>', 0, 1, True, sub_msg_state, 'Level to Sub Msg Transition')
        level_token = Token('level', Token.SOMETIMES_DATA, True, 'Level Token')
        level_state = LexerState([level_to_sub_msg_transition], level_token,
                                 False, 'Level State')
        level_lexer = Lexer(None, 'level', [], 'Level Lexer')

        # Msg lexer
        #precise_seconds_transition = LexerStateTransition('\[', -1, 1, False, precise_seconds_state, 'Msg to Precise Seconds Transition')
        level_transition = LexerStateTransition('[<]', -1, 1, False,
                                                level_state,
                                                'Msg to Level Transition')
        sub_msg_transition = LexerStateTransition('[^<]', -1, 0, False,
                                                  sub_msg_state,
                                                  'Msg to Sub Msg Transition')
        end_transition = LexerStateTransition('EOS', 0, 1, True, end_state,
                                              'Msg to End Transition')
        msg_token = Token('msg', Token.NEVER_DATA, False, 'Msg Token')
        msg_state = LexerState(
            [level_transition, sub_msg_transition, end_transition], msg_token,
            True, 'Msg State')
        msg_lexer = Lexer(None, 'msg', [], 'Msg Lexer')

        return ((msg_lexer, level_lexer, sub_msg_lexer), msg_state)
Example #15
    def testNextState(self):
        LexerStateTest.logger.debug('Testing the matching of a sequence.')

        # Test data
        source = 'Jul 11 09:51:54'
        stream = Stream(source, 'Unit Test Stream')
        token = Token('str', Token.ALWAYS_DATA, True, 'Token data')
        state_b = LexerState([], token, True, 'State B')
        pattern = r'\s+\d{2}\s+'
        start_offset = 0
        end_offset = 4
        transition = LexerStateTransition(pattern, start_offset, end_offset,
                                          True, state_b, 'A to B Transition')
        state_a = LexerState([transition], token, False, 'State A')

        # Show test data
        LexerStateTest.logger.debug('Created stream:\n%s' %
                                    (stream.to_pretty_json()))
        LexerStateTest.logger.debug('Created state A:\n%s' %
                                    (state_a.to_pretty_json()))
        LexerStateTest.logger.debug('Created state B:\n%s' %
                                    (state_b.to_pretty_json()))

        # Run test
        next_state = state_a
        new_token = None
        while next_state == state_a:
            current_element = stream.getNextElement()
            next_state, new_token = state_a.nextState(stream)

            # Show test output
            LexerStateTest.logger.debug('Fetching element: %s' %
                                        (current_element))

        # Show test output
        LexerStateTest.logger.debug('Token found:\n%s' %
                                    (new_token.to_pretty_json()))
        LexerStateTest.logger.debug('Final stream state:\n%s' %
                                    (stream.to_pretty_json()))
        LexerStateTest.logger.debug('Final lexer state:\n%s' %
                                    (next_state.to_pretty_json()))

        # Verify results
        token.data = source[:3]
        assert new_token == token, 'The token found is incorrect.'

        LexerStateTest.logger.debug('Test succeeded!')
Example #16
    def testGetStream(self):
        TokenTest.logger.debug('Test run creation of a stream from the token.')

        # Test data
        data = 'Jul 11 09:51:54'
        data_type = 'str'
        token = Token(data_type, Token.ALWAYS_DATA, False, data)

        # Show test data
        TokenTest.logger.debug('Created data: "%s"' % (data))
        TokenTest.logger.debug('Created token:\n%s' % (token.to_pretty_json()))

        # Run test
        stream = token.getStream()

        # Show test output
        TokenTest.logger.debug('Returned stream:\n%s' %
                               (stream.to_pretty_json()))

        # Verify results
        assert stream.source == data, 'The stream data was set incorrectly.'
        assert stream.name == data_type, 'The stream data type was set incorrectly.'

        TokenTest.logger.debug('Test succeeded!')
Example #17
    def createTokens(self):
        # Data structure
        tokens = []

        # Month token
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Jul')
        tokens.append(month_token)

        # Day token
        day_token = Token('day', Token.ALWAYS_DATA, True, '11')
        tokens.append(day_token)

        # Hour token
        hour_token = Token('hour', Token.ALWAYS_DATA, True, '09')
        tokens.append(hour_token)

        # Minute token
        minute_token = Token('minute', Token.ALWAYS_DATA, True, '51')
        tokens.append(minute_token)

        # Second token
        second_token = Token('second', Token.ALWAYS_DATA, True, '69')
        tokens.append(second_token)

        # Component token
        component_token = Token('component', Token.SOMETIMES_DATA, True,
                                'ubuntu NetworkManager')
        tokens.append(component_token)

        # Component id token
        component_id_token = Token('component_id', Token.SOMETIMES_DATA, True,
                                   '887')
        tokens.append(component_id_token)

        # Level token
        level_token = Token('level', Token.SOMETIMES_DATA, True, 'info')
        tokens.append(level_token)

        # Sub msg token
        sub_msg_token = Token('sub_msg', Token.SOMETIMES_DATA, True,
                              "monitoring kernel firmware directory "
                              "'/lib/firmware'.")
        tokens.append(sub_msg_token)

        return tokens
Example #18
    def setNextStates(cls, states):
        for state in states:
            for transition in state.transition_table:
                next_state_name = transition.next_state

                if next_state_name == 'end':
                    end_token = Token('end', Token.NEVER_DATA, True,
                                      'End Token')
                    end_state = LexerState([], end_token, True, 'End State')
                    transition.next_state = end_state
                else:
                    for inner_state in states:
                        if next_state_name == inner_state.name:
                            transition.next_state = inner_state
                            break
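setNextStates resolves the name stored in each transition's next_state field into the actual LexerState object, building a fresh end state whenever the name is the literal 'end'. A small sketch of the kind of input it expects follows; the helper name is hypothetical, it assumes setNextStates is a classmethod on the same class, and the constructors are the ones used in the examples above.

    def buildLinkedStates(cls):
        # Transitions initially carry the *name* of the target state; the
        # call to setNextStates() below swaps the name for the state object.
        day_token = Token('day', Token.ALWAYS_DATA, True, 'Day Token')
        day_state = LexerState([], day_token, False, 'Day State')
        day_transition = LexerStateTransition(r'\s+', 0, 1, True, 'Day State',
                                              'Month to Day Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([day_transition], month_token, False,
                                 'Month State')
        states = [month_state, day_state]
        cls.setNextStates(states)
        # day_transition.next_state now refers to day_state itself.
        return states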
Example #19
    def testGetStream(self):
        TokenTest.logger.debug('Test run creation of a stream from the token.')

        # Test data
        data = 'Jul 11 09:51:54'
        data_type = 'str'
        token = Token(data_type, Token.ALWAYS_DATA, False, data)

        # Show test data
        TokenTest.logger.debug('Created data: "%s"' % (data))
        TokenTest.logger.debug('Created token:\n%s' % (token.to_pretty_json()))

        # Run test
        stream = token.getStream()

        # Show test output
        TokenTest.logger.debug('Returned stream:\n%s' %
                               (stream.to_pretty_json()))

        # Verify results
        assert stream.source == data, 'The stream data was set incorrectly.'
        assert stream.name == data_type, 'The stream data type was set incorrectly.'

        TokenTest.logger.debug('Test succeeded!')
Example #20
    def contructToken(cls, token_json):
        token_type = token_json['data_type']

        data_aval = token_json['data'].lower()
        if data_aval == 'always':
            data_aval = Token.ALWAYS_DATA
        elif data_aval == 'sometimes':
            data_aval = Token.SOMETIMES_DATA
        elif data_aval == 'never':
            data_aval = Token.NEVER_DATA
        else:
            raise SyntaxError('Data availability not found.')

        atomic = cls.getBoolean(token_json['atomic'])

        return Token(token_type, data_aval, atomic, '')
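The JSON fragment this method reads carries at least the three keys used above ('data_type', 'data', 'atomic'). A sketch of a valid call follows; the helper name is hypothetical and the exact string accepted by getBoolean is an assumption.

    def buildMonthToken(cls):
        # Illustrative input; the keys come from the code above.
        token_json = {
            'data_type': 'month',  # becomes the Token's type
            'data': 'always',      # mapped to Token.ALWAYS_DATA
            'atomic': 'true',      # parsed by cls.getBoolean()
        }
        # Yields Token('month', Token.ALWAYS_DATA, True, ''), assuming
        # getBoolean('true') returns True.
        return cls.contructToken(token_json)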
Example #21
    def createLexer(self):
        # Test data
        source = 'Jul 11 09:51:54'

        # End state
        end_token = Token('end', Token.NEVER_DATA, True, 'End Token')
        end_state = LexerState([], end_token, True, 'End State')

        # Time lexer
        end_transition = LexerStateTransition('EOS', 0, 0, True, end_state,
                                              'Time to End Transition')
        time_token = Token('time', Token.ALWAYS_DATA, True, 'Time Token')
        time_state = LexerState([end_transition], time_token, False,
                                'Time State')
        time_lexer = Lexer(time_state, 'time', [], 'Time Lexer')

        # Month lexer
        time_transition = LexerStateTransition(r'\s+\d{2}\s+', 0, 4, True,
                                               time_state,
                                               'Month to Time Transition')
        month_token = Token('month', Token.ALWAYS_DATA, True, 'Month Token')
        month_state = LexerState([time_transition], month_token, False,
                                 'Month State')
        month_lexer = Lexer(month_state, 'month', [], 'Month Lexer')

        # Msg state
        month_transition = LexerStateTransition('.', 0, -1, True, month_state,
                                                'Msg to Month Transition')
        msg_token = Token('msg', Token.NEVER_DATA, False, source)
        msg_state = LexerState([month_transition], msg_token, False,
                               'Msg State')

        # Msg lexer
        msg_lexer = Lexer(msg_state, 'msg', [time_lexer, month_lexer],
                          'Msg Lexer')
        msg_lexer.changeState(msg_state)
        msg_lexer.token = msg_token
        msg_lexer.stream = msg_token.getStream()

        # Show test data
        LexerTest.logger.debug('Created state:\n%s' %
                               (msg_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' %
                               (month_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' %
                               (time_state.to_pretty_json()))
        LexerTest.logger.debug('Created state:\n%s' %
                               (end_state.to_pretty_json()))

        tokens = [msg_token, month_token, time_token]
        states = [msg_state, month_state, time_state, end_state]
        lexers = [msg_lexer, month_lexer, time_lexer]

        return [lexers, tokens, states]
Example #22
    def lexEvent(self, event):
        token = Token('event', Token.ALWAYS_DATA, False, event)
        self.lexer.start(token)
        tokens = self.lexer.getAllTokens()
        return tokens
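Unlike the logging variants of lexEvent earlier in this listing, this version hands the token list back to the caller. A hedged sketch of how a test might consume it, reusing the assertion messages from the other examples; the helper name and the expected_tokens argument are hypothetical.

    def verifyEvent(self, event, expected_tokens):
        # Compare the lexed tokens against an expected sequence, pairwise.
        tokens = self.lexEvent(event)
        assert len(tokens) > 0, 'No tokens found in list.'
        for expected, found in zip(expected_tokens, tokens):
            assert found == expected, 'The token found is incorrect.'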