Code example #1
File: cmd_exec_test.py  Project: shamrin/oil
  def testVarOps(self):
    ev = InitEvaluator()  # initializes x=xxx and y=yyy
    unset_sub = braced_var_sub(Tok(Id.VSub_Name, 'unset'))
    part_vals = []
    ev._EvalWordPart(unset_sub, part_vals)
    print(part_vals)

    set_sub = braced_var_sub(Tok(Id.VSub_Name, 'x'))
    part_vals = []
    ev._EvalWordPart(set_sub, part_vals)
    print(part_vals)

    # Now add some ops
    part = Tok(Id.Lit_Chars, 'default')
    arg_word = compound_word([part])
    test_op = suffix_op.Unary(Id.VTest_ColonHyphen, arg_word)
    unset_sub.suffix_op = test_op
    set_sub.suffix_op = test_op

    part_vals = []
    ev._EvalWordPart(unset_sub, part_vals)
    print(part_vals)

    part_vals = []
    ev._EvalWordPart(set_sub, part_vals)
    print(part_vals)
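
For context, the suffix op built above is the AST for the shell idiom ${x:-default}. As a reminder of the semantics under test, here is a minimal hypothetical sketch of what :- does in standard shell (not oil's implementation):

    # Hypothetical sketch of shell's ${var:-default} semantics, which the
    # Id.VTest_ColonHyphen suffix op above models; not oil code.
    def colon_hyphen(value, default):
        # ':-' substitutes the default when the variable is unset OR empty.
        return default if value in (None, '') else value

    assert colon_hyphen(None, 'default') == 'default'   # unset -> default
    assert colon_hyphen('', 'default') == 'default'     # empty -> default
    assert colon_hyphen('xxx', 'default') == 'xxx'      # x=xxx -> 'xxx'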
Code example #2
    def testRead(self):
        lexer = _InitLexer(CMD)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'ls'), t)
        t = lexer.Read(lex_mode_e.ShCommand)

        self.assertTokensEqual(Tok(Id.WS_Space, None), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, '/'), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Op_Newline, None), t)

        # Line two
        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'ls'), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.WS_Space, None), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, '/home/'), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Op_Newline, None), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Eof_Real, ''), t)

        # Another EOF gives EOF
        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Eof_Real, ''), t)
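
CMD and _InitLexer are defined elsewhere in the test file and are not part of this excerpt. The input can be reconstructed exactly from the assertions; the helper is sketched here purely as an assumption that it delegates to test_lib (which the other excerpts use):

    # Reconstructed from the assertions above: 'ls /' then 'ls /home/'.
    CMD = 'ls /\nls /home/\n'

    # Plausible shape for the helper (assumption; the real definition and
    # the exact test_lib signatures are not shown in this excerpt):
    def _InitLexer(s):
        arena = test_lib.MakeArena('<lexer_test.py>')
        _, lexer = test_lib.InitLexer(s, arena)
        return lexer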
Code example #3
    def testLookAhead(self):
        # Lines always end with '\n'
        l = LineLexer('', self.arena)
        self.assertEqual(Id.Unknown_Tok, l.LookAhead(lex_mode_e.ShCommand))

        l = LineLexer('foo', self.arena)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'foo'),
                               l.Read(lex_mode_e.ShCommand))
        self.assertEqual(Id.Unknown_Tok, l.LookAhead(lex_mode_e.ShCommand))

        l = LineLexer('foo  bar', self.arena)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'foo'),
                               l.Read(lex_mode_e.ShCommand))
        self.assertEqual(Id.Lit_Chars, l.LookAhead(lex_mode_e.ShCommand))

        # No lookahead; using the cursor!
        l = LineLexer('fun(', self.arena)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'fun'),
                               l.Read(lex_mode_e.ShCommand))
        self.assertEqual(Id.Op_LParen, l.LookAhead(lex_mode_e.ShCommand))

        l = LineLexer('fun  (', self.arena)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'fun'),
                               l.Read(lex_mode_e.ShCommand))
        self.assertEqual(Id.Op_LParen, l.LookAhead(lex_mode_e.ShCommand))
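
Taken together, the cases pin down a contract for LookAhead: it peeks past intervening whitespace without consuming anything, and reports Id.Unknown_Tok once the line is exhausted. A hypothetical model of that contract (not the LineLexer implementation):

    # Model of the LookAhead contract implied by the asserts above;
    # hypothetical, not oil's code.  Tokens are anything with an .id field.
    def look_ahead_model(tokens, pos):
        i = pos
        while i < len(tokens):
            if tokens[i].id != Id.WS_Space:   # skip whitespace only
                return tokens[i].id
            i += 1
        return Id.Unknown_Tok                 # end of line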
Code example #4
    def testMode_DollarSq(self):
        lexer = _InitLexer(r'foo bar\n \x00 \000 \u0065')

        t = lexer.Read(lex_mode_e.SQ_C)
        print(t)
        self.assertTokensEqual(Tok(Id.Char_Literals, 'foo bar'), t)

        t = lexer.Read(lex_mode_e.SQ_C)
        print(t)
        self.assertTokensEqual(Tok(Id.Char_OneChar, r'\n'), t)
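
The remaining escapes in the input (\x00, \000, \u0065) are not asserted in this excerpt. For reference, their meanings under the C-style $'...' quoting being lexed here, shown via their Python equivalents (standard escape semantics, not oil-specific):

    assert '\n' == chr(10)      # \n -> newline, the Char_OneChar case above
    assert '\x00' == chr(0)     # \x00 -> NUL, hex escape
    assert '\000' == chr(0)     # \000 -> NUL, octal escape
    assert '\u0065' == 'e'      # \u0065 -> 'e', 4-digit unicode escape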
Code example #5
    def testToken(self):
        t = Tok(Id.Lit_Chars, 'abc')
        print(t)

        # This redundancy is OK I guess.
        t = Tok(Id.Lit_LBrace, '{')
        print(t)

        t = Tok(Id.Op_Semi, ';')
        print(t)
Code example #6
    def testMode_ExtGlob(self):
        lexer = _InitLexer('@(foo|bar)')

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.ExtGlob_At, '@('), t)

        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'foo'), t)

        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.Op_Pipe, None), t)

        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'bar'), t)

        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.Op_RParen, None), t)

        # Individual cases

        lexer = _InitLexer('@(')
        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.ExtGlob_At, '@('), t)

        lexer = _InitLexer('*(')
        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.ExtGlob_Star, '*('), t)

        lexer = _InitLexer('?(')
        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.ExtGlob_QMark, '?('), t)

        lexer = _InitLexer('$')
        t = lexer.Read(lex_mode_e.ExtGlob)
        self.assertTokensEqual(Tok(Id.Lit_Other, '$'), t)
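
The prefixes exercised here are three of bash's extended glob operators; the final case shows that a bare $ in extglob mode falls back to Id.Lit_Other. For reference, the matching semantics behind the asserted token Ids (standard bash semantics; only the Ids appearing in this test are listed):

    # bash extglob prefixes and the token Ids asserted above:
    EXTGLOB_IDS = {
        '@(': Id.ExtGlob_At,     # matches exactly one of the alternatives
        '*(': Id.ExtGlob_Star,   # matches zero or more occurrences
        '?(': Id.ExtGlob_QMark,  # matches zero or one occurrence
    }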
Code example #7
    def testMode_BashRegex(self):
        lexer = _InitLexer('(foo|bar)')

        t = lexer.Read(lex_mode_e.BashRegex)
        self.assertTokensEqual(Tok(Id.Lit_Other, '('), t)

        t = lexer.Read(lex_mode_e.BashRegex)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'foo'), t)

        t = lexer.Read(lex_mode_e.BashRegex)
        self.assertTokensEqual(Tok(Id.Lit_Other, '|'), t)
Code example #8
    def testPushHint(self):
        # Extglob use case
        lexer = _InitLexer('@()')
        lexer.PushHint(Id.Op_RParen, Id.Right_ExtGlob)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.ExtGlob_At, '@('), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Right_ExtGlob, None), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Eof_Real, ''), t)
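
PushHint lets the parser retarget the next occurrence of a token Id: the ) that would ordinarily lex as Id.Op_RParen comes back as Id.Right_ExtGlob, because the parser knows it closes an extglob. A minimal sketch of one way such a hint stack could work (an assumption about the mechanism; the test only fixes the observable behavior):

    # Hypothetical translation stack behind a PushHint-style API; not oil's
    # implementation.
    class HintStack(object):
        def __init__(self):
            self._stack = []

        def PushHint(self, old_id, new_id):
            self._stack.append((old_id, new_id))

        def Translate(self, tok_id):
            # Rewrite the next matching token Id once, then drop the hint.
            if self._stack and self._stack[-1][0] == tok_id:
                return self._stack.pop()[1]
            return tok_id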
Code example #9
    def testRangePartDetect(self):
        CASES = [
            ('', None),
            ('1', None),
            ('1..', None),
            ('1..3', ('1', '3')),
            ('3..-10..-2', ('3', '-10', -2)),
            ('3..-10..-2..', None),  # nope!  unexpected trailing tokens
            ('a', None),
            ('a..', None),
            ('a..z', ('a', 'z')),
            ('a..z..', None),
            ('z..a..-1', ('z', 'a', -1)),
        ]
        for s, expected in CASES:
            tok = Tok(Id.Lit_Chars, s)
            part = braces._RangePartDetect(tok)
            if expected is None:
                self.assert_(part is None)
            elif len(expected) == 2:
                s, e = expected
                self.assertEqual(s, part.start)
                self.assertEqual(e, part.end)
                #self.assertEqual(runtime.NO_SPID, part.step)

            elif len(expected) == 3:
                s, e, step = expected
                self.assertEqual(s, part.start)
                self.assertEqual(e, part.end)
                self.assertEqual(step, part.step)

            else:
                raise AssertionError()

            log('%r\t%s', s, part)
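
From CASES, the accepted inputs are integer or single-letter ranges with an optional integer step and no trailing tokens. A hypothetical regex stating the same acceptance rule (not oil's detector, which works on the token):

    import re

    # int..int[..int] or char..char[..int], anchored so trailing '..' fails.
    RANGE_RE = re.compile(
        r'^(?:-?\d+\.\.-?\d+|[a-zA-Z]\.\.[a-zA-Z])(?:\.\.-?\d+)?$')

    assert RANGE_RE.match('1..3')
    assert RANGE_RE.match('z..a..-1')
    assert not RANGE_RE.match('3..-10..-2..')   # trailing tokens rejected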
Code example #10
    def testEmitCompDummy(self):
        lexer = _InitLexer('echo ')
        lexer.EmitCompDummy()

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'echo'), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.WS_Space, None), t)

        # Right before EOF
        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_CompDummy, ''), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Eof_Real, ''), t)
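
The asserted sequence is the whole contract: after EmitCompDummy(), the lexer yields one zero-width Id.Lit_CompDummy token immediately before Id.Eof_Real, presumably so completion code has a token to attach to at the end of the input. A hypothetical helper to make the tail of the stream visible (not in the test file):

    # Drain a lexer; with EmitCompDummy() called first, the stream is
    # expected to end [..., Lit_CompDummy(''), Eof_Real('')].
    def read_all(lexer):
        tokens = []
        while True:
            t = lexer.Read(lex_mode_e.ShCommand)
            tokens.append(t)
            if t.id == Id.Eof_Real:
                return tokens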
Code example #11
def _assertReadWordWithArena(test, w_parser):
    w = w_parser.ReadWord(lex_mode_e.ShCommand)
    assert w is not None
    w.PrettyPrint()

    # Next word must be Eof_Real
    w2 = w_parser.ReadWord(lex_mode_e.ShCommand)
    test.assertTrue(test_lib.TokensEqual(Tok(Id.Eof_Real, ''), w2), w2)
    return w
Code example #12
    def testLookAhead(self):
        # I think this is the usage pattern we care about.  Peek and Next() past
        # the function; then Peek() the next token.  Then Lookahead in that state.
        lexer = _InitLexer('fun()')

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'fun'), t)

        #self.assertEqual(Id.Op_LParen, lexer.LookAhead())

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Op_LParen, None), t)

        self.assertEqual(Id.Op_RParen, lexer.LookAhead(lex_mode_e.ShCommand))

        lexer = _InitLexer('fun ()')

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.Lit_Chars, 'fun'), t)

        t = lexer.Read(lex_mode_e.ShCommand)
        self.assertTokensEqual(Tok(Id.WS_Space, None), t)

        self.assertEqual(Id.Op_LParen, lexer.LookAhead(lex_mode_e.ShCommand))
Code example #13
    def testTokens(self):
        print(Id.Op_Newline)
        print(Tok(Id.Op_Newline, '\n'))

        print(Id.Op_Newline)

        print(Kind.Eof)
        print(Kind.Left)

        print('--')
        num_kinds = 0
        for name in dir(Kind):
            if name[0].isupper():
                kind = getattr(Kind, name)
                print('%-20s %s' % (name, kind))
                num_kinds += 1

        print()
        print('Number of Kinds:', num_kinds)
        print()

        for name in dir(Id):
            if name[0].isupper():
                id_ = getattr(Id, name)
                print('%-30s %s' % (name, id_))

        # 309 out of 256 tokens now
        print()
        print('Number of IDs:', len(ID_SPEC.id_str2int))

        t = Tok(Id.Arith_Plus, '+')
        self.assertEqual(Kind.Arith, LookupKind(t.id))
        t = Tok(Id.Arith_CaretEqual, '^=')
        self.assertEqual(Kind.Arith, LookupKind(t.id))
        t = Tok(Id.Arith_RBrace, '}')
        self.assertEqual(Kind.Arith, LookupKind(t.id))

        t = Tok(Id.BoolBinary_GlobDEqual, '==')
        self.assertEqual(Kind.BoolBinary, LookupKind(t.id))

        t = Tok(Id.BoolBinary_Equal, '=')
        self.assertEqual(Kind.BoolBinary, LookupKind(t.id))
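
All of the LookupKind asserts follow one rule: an Id's Kind is named by the prefix of the Id's name (Arith_Plus -> Kind.Arith, BoolBinary_Equal -> Kind.BoolBinary). A hypothetical name-based model of that mapping (oil presumably uses a generated table rather than string parsing):

    # Name-based model of the Id -> Kind rule the asserts above rely on.
    def lookup_kind_by_name(id_name):
        prefix = id_name.split('_', 1)[0]   # 'Arith_CaretEqual' -> 'Arith'
        return getattr(Kind, prefix)

    assert lookup_kind_by_name('Arith_Plus') == Kind.Arith
    assert lookup_kind_by_name('BoolBinary_GlobDEqual') == Kind.BoolBinary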
Code example #14
    def testHereDoc(self):
        w_parser = test_lib.InitWordParser("""\
ls foo

# Multiple newlines and comments should be ignored

ls bar
""")
        print('--MULTI')
        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        parts = [Tok(Id.Lit_Chars, 'ls')]
        test_lib.AssertAsdlEqual(self, compound_word(parts), w)

        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        parts = [Tok(Id.Lit_Chars, 'foo')]
        test_lib.AssertAsdlEqual(self, compound_word(parts), w)

        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        t = Tok(Id.Op_Newline, None)
        test_lib.AssertAsdlEqual(self, t, w)

        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        parts = [Tok(Id.Lit_Chars, 'ls')]
        test_lib.AssertAsdlEqual(self, compound_word(parts), w)

        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        parts = [Tok(Id.Lit_Chars, 'bar')]
        test_lib.AssertAsdlEqual(self, compound_word(parts), w)

        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        t = Tok(Id.Op_Newline, None)
        test_lib.AssertAsdlEqual(self, t, w)

        w = w_parser.ReadWord(lex_mode_e.ShCommand)
        t = Tok(Id.Eof_Real, '')
        test_lib.AssertAsdlEqual(self, t, w)
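
Note the return-type convention the asserts rely on: ReadWord yields a compound_word for ordinary words but a bare token for structural items such as Op_Newline and Eof_Real. A tiny dispatch sketch of that convention (hypothetical helper, assuming compound_word exposes the .parts list its constructor calls above suggest):

    def describe_word(w):
        if isinstance(w, compound_word):
            return 'word with %d part(s)' % len(w.parts)
        return 'token %s' % w.id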
Code example #15
    def testReadOuter(self):
        l = LineLexer('\n', self.arena)
        self.assertTokensEqual(Tok(Id.Op_Newline, None),
                               l.Read(lex_mode_e.ShCommand))
Code example #16
    def testMode_DBracket(self):
        lex = _InitLexer('-z foo')
        t = lex.Read(lex_mode_e.DBracket)
        self.assertTokensEqual(Tok(Id.BoolUnary_z, '-z'), t)
        self.assertEqual(Kind.BoolUnary, lookup.LookupKind(t.id))