Example #1
 def testLeftArrow(self):
     tok = pysh.Tokenizer('x<-echo')
     self.assertEquals([(LITERAL, 'x'), (LEFT_ARROW, '<-'),
                        (LITERAL, 'echo'), (EOF, '')], list(tok))
     tok = pysh.Tokenizer('x <- echo')
     self.assertEquals([(LITERAL, 'x'), (LEFT_ARROW, '<-'),
                        (LITERAL, 'echo'), (EOF, '')], list(tok))
Example #2
 def testPipeWithoutSpace(self):
     tok = pysh.Tokenizer('cat|/tmp/out')
     self.assertEquals([
         (LITERAL, 'cat'),
         (PIPE, '|'),
         (LITERAL, '/tmp/out'),
         (EOF, ''),
     ], list(tok))
Example #3
 def testSubstitutionUnderscore(self):
     tok = pysh.Tokenizer('echo $__init__')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SUBSTITUTION, '$__init__'),
         (EOF, ''),
     ], list(tok))
Example #4
 def testExpression(self):
     tok = pysh.Tokenizer('echo ${{1: 10}}')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SUBSTITUTION, '${{1: 10}}'),
         (EOF, ''),
     ], list(tok))
Example #5
 def testTabSeparator(self):
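     # A tab between words is expected to come out as a SPACE token with text ' '.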
     tok = pysh.Tokenizer('cat\t/tmp/www/foo.txt')
     self.assertEquals([
         (LITERAL, 'cat'),
         (SPACE, ' '),
         (LITERAL, '/tmp/www/foo.txt'),
         (EOF, ''),
     ], list(tok))
Example #6
 def testSubstitutionSuffix(self):
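     # The non-global 'ls' alias is not expanded when the word carries a trailing substitution ('ls$x').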
     alias_map = {'ls': ('ls -la', False)}
     tok = pysh.Tokenizer('ls$x', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'ls'),
         (SUBSTITUTION, '$x'),
         (EOF, ''),
     ], list(tok))
Example #7
 def testSemicolon(self):
     input = 'echo; cat;'
     parser = pysh.Parser(pysh.Tokenizer(input))
     ast = parser.parse()
     self.assertEquals(3, len(ast))
     self.assertEquals(';', ast[0])
     self.assertTrue(isinstance(ast[1], pysh.Process))
     self.assertTrue(isinstance(ast[2], pysh.Process))
Example #8
 def testSubstitutionPrefix(self):
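     # Even a global alias is not expanded when it is glued to a preceding substitution ('${x}GR').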
     alias_map = {'GR': ('| grep', True)}
     tok = pysh.Tokenizer('${x}GR', alias_map=alias_map)
     self.assertEquals([
         (SUBSTITUTION, '${x}'),
         (LITERAL, 'GR'),
         (EOF, ''),
     ], list(tok))
Example #9
 def testSubstitutionWithoutSpace(self):
     tok = pysh.Tokenizer('echo hoge$a')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'hoge'),
         (SUBSTITUTION, '$a'),
         (EOF, ''),
     ], list(tok))
Example #10
 def testRecursiveNotGlobal(self):
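     # The global alias 'sl' expands to 'ls' even in argument position, but the result is
     # not expanded again because the 'ls' alias is not global.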
     alias_map = {'ls': ('ls -la', False), 'sl': ('ls', True)}
     tok = pysh.Tokenizer('echo sl', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'ls'),
         (EOF, ''),
     ], list(tok))
Example #11
 def testBraceSubstitution(self):
     tok = pysh.Tokenizer('echo hoge${a}')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'hoge'),
         (SUBSTITUTION, '${a}'),
         (EOF, ''),
     ], list(tok))
Example #12
 def testBackquoteWithPipe(self):
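     # The backquoted body is parsed recursively; its AST begins with the '|' operator.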
     input = 'echo `echo foo | cat`'
     parser = pysh.Parser(pysh.Tokenizer(input))
     ast = parser.parse()
     self.assertTrue(isinstance(ast, pysh.Process))
     self.assertEquals(2, len(ast.args))
     self.assertEquals(1, len(ast.args[1]))
     self.assertEquals(BACKQUOTE, ast.args[1][0][0])
     self.assertEquals('|', ast.args[1][0][1][0])
Example #13
 def testQuotedStringConcat(self):
     tok = pysh.Tokenizer('echo \'abc\'"def"')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SINGLE_QUOTED_STRING, '\'abc\''),
         (DOUBLE_QUOTED_STRING, '"def"'),
         (EOF, ''),
     ], list(tok))
Example #14
 def testSubstitutionInvalidName(self):
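     # '$' followed by a digit is not a valid substitution name, so it falls back to two LITERAL tokens.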
     tok = pysh.Tokenizer('echo $10')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, '$'),
         (LITERAL, '10'),
         (EOF, ''),
     ], list(tok))
Example #15
 def testRedirectAppend(self):
     tok = pysh.Tokenizer('echo a>>/tmp/out')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'a'),
         (REDIRECT, '>>'),
         (LITERAL, '/tmp/out'),
         (EOF, ''),
     ], list(tok))
Example #16
 def testGlobal(self):
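     # A global alias may expand anywhere on the line, including into operator tokens such as PIPE and AND_OP.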
     alias_map = {'GR': ('| grep &&', True)}
     tok = pysh.Tokenizer('ls GR echo', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'ls'),
         (PIPE, '|'),
         (LITERAL, 'grep'),
         (AND_OP, '&&'),
         (LITERAL, 'echo'),
         (EOF, ''),
     ], list(tok))
Example #17
 def testNotGlobal(self):
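     # A non-global alias is only expanded in command position, not as an argument.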
     alias_map = {'ls': ('ls -la', False)}
     tok = pysh.Tokenizer('echo ls /tmp/www/foo.txt', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'ls'),
         (SPACE, ' '),
         (LITERAL, '/tmp/www/foo.txt'),
         (EOF, ''),
     ], list(tok))
Example #18
 def testSubstitution(self):
     tok = pysh.Tokenizer('echo $a$b/$c')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SUBSTITUTION, '$a'),
         (SUBSTITUTION, '$b'),
         (LITERAL, '/'),
         (SUBSTITUTION, '$c'),
         (EOF, ''),
     ], list(tok))
Example #19
 def testBackquoteWithSpace(self):
     input = 'echo ` echo foo `'
     parser = pysh.Parser(pysh.Tokenizer(input))
     ast = parser.parse()
     self.assertTrue(isinstance(ast, pysh.Process))
     self.assertEquals(2, len(ast.args))
     self.assertEquals(1, len(ast.args[1]))
     self.assertEquals(BACKQUOTE, ast.args[1][0][0])
     self.assertTrue(isinstance(ast.args[1][0][1], pysh.Process))
     self.assertEquals([[(LITERAL, 'echo')], [(LITERAL, 'foo')]],
                       ast.args[1][0][1].args)
Example #20
 def testAndOrOperator(self):
     tok = pysh.Tokenizer('foo && bar || a&&b||c')
     self.assertEquals([
         (LITERAL, 'foo'),
         (AND_OP, '&&'),
         (LITERAL, 'bar'),
         (OR_OP, '||'),
         (LITERAL, 'a'),
         (AND_OP, '&&'),
         (LITERAL, 'b'),
         (OR_OP, '||'),
         (LITERAL, 'c'),
         (EOF, ''),
     ], list(tok))
Example #21
 def test(self):
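     # Note: the token-type constants appear to be plain strings (e.g. LITERAL == 'literal'),
     # so this test compares against the string values directly.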
     input = 'echo hoge$foo || echo piyo && cat'
     parser = pysh.Parser(pysh.Tokenizer(input))
     ast = parser.parse()
     self.assertEquals(3, len(ast))
     self.assertEquals('&&', ast[0])
     self.assertEquals(3, len(ast[1]))
     self.assertEquals('||', ast[1][0])
     proc0 = ast[1][1]
     self.assertTrue(isinstance(proc0, pysh.Process))
     self.assertEquals([[('literal', 'echo')],
                        [('literal', 'hoge'), ('substitution', '$foo')]],
                       proc0.args)
     self.assertFalse(proc0.redirects)
     proc1 = ast[1][2]
     self.assertTrue(isinstance(proc1, pysh.Process))
     self.assertEquals([[('literal', 'echo')], [('literal', 'piyo')]],
                       proc1.args)
     self.assertFalse(proc1.redirects)
     proc2 = ast[2]
     self.assertTrue(isinstance(proc2, pysh.Process))
     self.assertEquals([[('literal', 'cat')]], proc2.args)
     self.assertFalse(proc2.redirects)
Example #22
 def testBackQuoteWithSpace(self):
     tok = pysh.Tokenizer('echo ` echo foo `')
     self.assertEquals([(LITERAL, 'echo'), (SPACE, ' '), (BACKQUOTE, '`'),
                        (SPACE, ' '), (LITERAL, 'echo'), (SPACE, ' '),
                        (LITERAL, 'foo'), (SPACE, ' '), (BACKQUOTE, '`'),
                        (EOF, '')], list(tok))
Example #23
 def getAst(self, cmd):
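     # Shared helper for the parser tests: tokenize a command string and parse it into an AST.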
     tok = pysh.Tokenizer(cmd)
     parser = pysh.Parser(tok)
     return parser.parse()
Example #24
 def testParenthesis(self):
     tok = pysh.Tokenizer('() a(b)')
     self.assertEquals([(PARENTHESIS_START, '('), (PARENTHESIS_END, ')'),
                        (LITERAL, 'a'), (PARENTHESIS_START, '('),
                        (LITERAL, 'b'), (PARENTHESIS_END, ')'), (EOF, '')],
                       list(tok))
Example #25
 def testSemicolonWithSpace(self):
     tok = pysh.Tokenizer('a ; b ; ')
     self.assertEquals([(LITERAL, 'a'), (SEMICOLON, ';'), (LITERAL, 'b'),
                        (SEMICOLON, ';'), (EOF, '')], list(tok))