示例#1
0
 def testBraceRangeLexer(self):
     """Print BRACE_RANGE_LEXER tokens for valid and invalid range strings."""
     lex = match.BRACE_RANGE_LEXER
     cases = [
         'a..z',
         '100..300',
         '-300..-100..1',
         '1.3',  # invalid
         'aa',
     ]
     for case in cases:
         print(list(lex.Tokens(case)))
示例#2
0
    def testEchoLexer(self):
        """Print ECHO_LEXER tokens for a handful of escape-sequence strings."""
        lex = match.ECHO_LEXER
        cases = [
            r'newline \n NUL \0 octal \0377 hex \x00',
            r'unicode \u0065 \U00000065',
            r'\d \e \f \g',
        ]
        for case in cases:
            print(list(lex.Tokens(case)))

        # NOTE: We only test with one of these.
        print(match.ECHO_MATCHER)  # either fast or slow
示例#3
0
    def testHistoryLexer(self):
        """HISTORY_LEXER emits Id.History_Op only for unquoted ! operators.

        Escaped (\\!) and single-quoted ('!!') occurrences must NOT be
        tokenized as history operators; an unquoted !! after a quoted
        single quote must be.
        """
        lex = match.HISTORY_LEXER

        def _types(tokens):
            # Extract just the token Id from each (id, value) pair.
            return [tok_type for tok_type, _ in tokens]

        print(list(lex.Tokens(r'echo hi')))

        print(list(lex.Tokens(r'echo !! !* !^ !$')))

        # No history operator with \ escape
        tokens = list(lex.Tokens(r'echo \!!'))
        print(tokens)
        # assertNotIn/assertIn replace the deprecated self.assert_ alias,
        # which was removed in Python 3.12.
        self.assertNotIn(Id.History_Op, _types(tokens))

        print(list(lex.Tokens(r'echo !3...')))
        print(list(lex.Tokens(r'echo !-5...')))
        print(list(lex.Tokens(r'echo !x/foo.py bar')))

        print('---')

        # No history operator in single quotes
        tokens = list(lex.Tokens(r"echo '!!' $'!!' "))
        print(tokens)
        self.assertNotIn(Id.History_Op, _types(tokens))

        # No history operator in incomplete single quotes
        tokens = list(lex.Tokens(r"echo '!! "))
        print(tokens)
        self.assertNotIn(Id.History_Op, _types(tokens))

        # Quoted single quote, and then a History operator
        tokens = list(lex.Tokens(r"echo \' !! "))
        print(tokens)
        # YES operator
        self.assertIn(Id.History_Op, _types(tokens))
示例#4
0
    def testHistoryDoesNotConflict(self):
        """! in glob/expansion contexts must not be treated as history search."""
        lex = match.HISTORY_LEXER

        # https://github.com/oilshell/oil/issues/264
        #
        # Bash has a bunch of hacks to suppress the conflict between ! for history
        # and:
        #
        # 1. [!abc] globbing
        # 2. ${!foo} indirect expansion
        # 3. $!x -- the PID
        # 4. !(foo|bar) -- extended glob
        #
        # I guess [[ a != b ]] doesn't match the pattern in bash.

        three_other = [Id.History_Other, Id.History_Other, Id.History_Other]
        two_other = [Id.History_Other, Id.History_Other]
        CASES = [
            (r'[!abc]', three_other),
            (r'${!indirect}', three_other),
            (r'$!x', three_other),  # didn't need a special case
            (r'!(foo|bar)', two_other),  # didn't need a special case
        ]

        for s, expected_types in CASES:
            tokens = list(lex.Tokens(s))
            print(tokens)
            # Only the Id matters here; the token value is ignored.
            actual_types = [id_ for id_, _ in tokens]

            # assertNotIn replaces the deprecated self.assert_ alias
            # (removed in Python 3.12) and gives a better failure message.
            self.assertNotIn(Id.History_Search, actual_types, tokens)

            self.assertEqual(expected_types, actual_types)
示例#5
0
 def testEchoLexer(self):
     """Print EchoLexer tokens for several escape-sequence inputs."""
     for line in (
             r'newline \n NUL \0 octal \0377 hex \x00',
             r'unicode \u0065 \U00000065',
             r'\d \e \f \g',
     ):
         print(match.EchoLexer(line).Tokens())
示例#6
0
 def testBraceRangeLexer(self):
     """Print BraceRangeLexer tokens for valid and invalid range strings."""
     for text in (
             'a..z',
             '100..300',
             '-300..-100..1',
             '1.3',  # invalid
             'aa',
     ):
         print(match.BraceRangeLexer(text).Tokens())
示例#7
0
 def testHistoryLexer(self):
   """Print HISTORY_LEXER tokens for a set of history-expansion inputs."""
   lex = match.HISTORY_LEXER
   for line in (
       r'echo hi',
       r'echo !! !* !^ !$',
       r'echo \!!',
       r'echo !3...',
       r'echo !-5...',
       r'echo !x/foo.py bar',
   ):
     print(list(lex.Tokens(line)))
示例#8
0
 def testPS1Lexer(self):
     """Print PS1_LEXER tokens for plain text and backslash prompt escapes."""
     lex = match.PS1_LEXER
     for prompt in (r'foo', r'\h \w \$'):
         print(list(lex.Tokens(prompt)))
示例#9
0
def Ps1Tokens(s):
    # type: (str) -> List[Tuple[Id_t, str]]
    """Tokenize a PS1 prompt string into (id, value) pairs."""
    return SimpleLexer(PS1_MATCHER, s).Tokens()
示例#10
0
def HistoryTokens(s):
    # type: (str) -> List[Tuple[Id_t, str]]
    """Tokenize a command line for history expansion into (id, value) pairs."""
    return SimpleLexer(HISTORY_MATCHER, s).Tokens()