def testLeftArrow(self):
    """'<-' tokenizes as LEFT_ARROW, with or without surrounding spaces."""
    # assertEquals is a deprecated alias removed in Python 3.12.
    tok = pysh.Tokenizer('x<-echo')
    self.assertEqual(
        [(LITERAL, 'x'), (LEFT_ARROW, '<-'), (LITERAL, 'echo'), (EOF, '')],
        list(tok))
    tok = pysh.Tokenizer('x <- echo')
    self.assertEqual(
        [(LITERAL, 'x'), (LEFT_ARROW, '<-'), (LITERAL, 'echo'), (EOF, '')],
        list(tok))
def testPipeWithoutSpace(self):
    """'|' is recognized as PIPE even with no whitespace around it."""
    tok = pysh.Tokenizer('cat|/tmp/out')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'cat'),
            (PIPE, '|'),
            (LITERAL, '/tmp/out'),
            (EOF, ''),
        ],
        list(tok))
def testSubstitutionUnderscore(self):
    """A variable name made of underscores is a valid SUBSTITUTION."""
    tok = pysh.Tokenizer('echo $__init__')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (SUBSTITUTION, '$__init__'),
            (EOF, ''),
        ],
        list(tok))
def testExpression(self):
    """A '${{...}}' expression is kept whole as one SUBSTITUTION token."""
    tok = pysh.Tokenizer('echo ${{1: 10}}')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (SUBSTITUTION, '${{1: 10}}'),
            (EOF, ''),
        ],
        list(tok))
def test2_2(self):
    """A tab separator is normalized to a single SPACE token."""
    # Note the input uses '\t' but the expected token is (SPACE, ' ').
    tok = pysh.Tokenizer('cat\t/tmp/www/foo.txt')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'cat'),
            (SPACE, ' '),
            (LITERAL, '/tmp/www/foo.txt'),
            (EOF, ''),
        ],
        list(tok))
def testSubstitutionSuffix(self):
    """A non-global alias is not expanded when a substitution is appended."""
    alias_map = {'ls': ('ls -la', False)}
    tok = pysh.Tokenizer('ls$x', alias_map=alias_map)
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'ls'),
            (SUBSTITUTION, '$x'),
            (EOF, ''),
        ],
        list(tok))
def testSemicolon(self):
    """Parsing 'a; b;' yields a 3-tuple AST: (';', proc, proc)."""
    cmd = 'echo; cat;'  # renamed from 'input' to avoid shadowing the builtin
    parser = pysh.Parser(pysh.Tokenizer(cmd))
    ast = parser.parse()
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(3, len(ast))
    self.assertEqual(';', ast[0])
    self.assertTrue(isinstance(ast[1], pysh.Process))
    self.assertTrue(isinstance(ast[2], pysh.Process))
def testSubstitutionPrefix(self):
    """A global alias is not expanded when prefixed by a substitution."""
    alias_map = {'GR': ('| grep', True)}
    tok = pysh.Tokenizer('${x}GR', alias_map=alias_map)
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (SUBSTITUTION, '${x}'),
            (LITERAL, 'GR'),
            (EOF, ''),
        ],
        list(tok))
def testSubstitutionWithoutSpace(self):
    """A substitution glued to a literal splits into separate tokens."""
    tok = pysh.Tokenizer('echo hoge$a')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, 'hoge'),
            (SUBSTITUTION, '$a'),
            (EOF, ''),
        ],
        list(tok))
def testRecursiveNotGlobal(self):
    """A global alias expands recursively, but the inner non-global
    alias does not expand in non-command position."""
    alias_map = {'ls': ('ls -la', False), 'sl': ('ls', True)}
    tok = pysh.Tokenizer('echo sl', alias_map=alias_map)
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, 'ls'),
            (EOF, ''),
        ],
        list(tok))
def testBraceSubstitution(self):
    """'${a}' brace form is tokenized as a SUBSTITUTION."""
    tok = pysh.Tokenizer('echo hoge${a}')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, 'hoge'),
            (SUBSTITUTION, '${a}'),
            (EOF, ''),
        ],
        list(tok))
def testBackquoteWithSemicolon(self):
    """A backquoted pipeline argument parses into a nested BACKQUOTE AST.

    NOTE(review): the name says "semicolon" but the input uses a pipe —
    confirm whether the name or the input is stale.
    """
    cmd = 'echo `echo foo | cat`'  # renamed from 'input' (builtin shadow)
    parser = pysh.Parser(pysh.Tokenizer(cmd))
    ast = parser.parse()
    self.assertTrue(isinstance(ast, pysh.Process))
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(2, len(ast.args))
    self.assertEqual(1, len(ast.args[1]))
    self.assertEqual(BACKQUOTE, ast.args[1][0][0])
    self.assertEqual('|', ast.args[1][0][1][0])
def testQuotedStringConcat(self):
    """Adjacent single- and double-quoted strings become separate tokens.

    Renamed: this method was called testRedirectAppend, colliding with the
    real redirect-append test defined later in the class, so this test was
    silently shadowed and never ran.
    """
    tok = pysh.Tokenizer('echo \'abc\'"def"')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (SINGLE_QUOTED_STRING, '\'abc\''),
            (DOUBLE_QUOTED_STRING, '"def"'),
            (EOF, ''),
        ],
        list(tok))
def testRedirectInvalidName(self):
    """'$10' is not a valid variable name: '$' and '10' become LITERALs.

    NOTE(review): the "Redirect" prefix looks unrelated to the input —
    confirm intended name.
    """
    tok = pysh.Tokenizer('echo $10')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, '$'),
            (LITERAL, '10'),
            (EOF, ''),
        ],
        list(tok))
def testRedirectAppend(self):
    """'>>' with no surrounding spaces tokenizes as REDIRECT."""
    tok = pysh.Tokenizer('echo a>>/tmp/out')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, 'a'),
            (REDIRECT, '>>'),
            (LITERAL, '/tmp/out'),
            (EOF, ''),
        ],
        list(tok))
def testGlobal(self):
    """A global alias expands anywhere, including into operator tokens."""
    alias_map = {'GR': ('| grep &&', True)}
    tok = pysh.Tokenizer('ls GR echo', alias_map=alias_map)
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'ls'),
            (PIPE, '|'),
            (LITERAL, 'grep'),
            (AND_OP, '&&'),
            (LITERAL, 'echo'),
            (EOF, ''),
        ],
        list(tok))
def testNotGlobal(self):
    """A non-global alias is NOT expanded outside command position."""
    alias_map = {'ls': ('ls -la', False)}
    tok = pysh.Tokenizer('echo ls /tmp/www/foo.txt', alias_map=alias_map)
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, 'ls'),
            (SPACE, ' '),
            (LITERAL, '/tmp/www/foo.txt'),
            (EOF, ''),
        ],
        list(tok))
def testSubstitution(self):
    """Back-to-back substitutions split correctly around a literal '/'."""
    tok = pysh.Tokenizer('echo $a$b/$c')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (SUBSTITUTION, '$a'),
            (SUBSTITUTION, '$b'),
            (LITERAL, '/'),
            (SUBSTITUTION, '$c'),
            (EOF, ''),
        ],
        list(tok))
def testBackquoteWithSpace(self):
    """Spaces just inside backquotes are ignored by the parser."""
    cmd = 'echo ` echo foo `'  # renamed from 'input' (builtin shadow)
    parser = pysh.Parser(pysh.Tokenizer(cmd))
    ast = parser.parse()
    self.assertTrue(isinstance(ast, pysh.Process))
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(2, len(ast.args))
    self.assertEqual(1, len(ast.args[1]))
    self.assertEqual(BACKQUOTE, ast.args[1][0][0])
    # The backquote payload is itself a parsed Process.
    self.assertTrue(isinstance(ast.args[1][0][1], pysh.Process))
    self.assertEqual(
        [[(LITERAL, 'echo')], [(LITERAL, 'foo')]],
        ast.args[1][0][1].args)
def testAndOrOperator(self):
    """'&&' and '||' tokenize with or without surrounding spaces."""
    tok = pysh.Tokenizer('foo && bar || a&&b||c')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'foo'),
            (AND_OP, '&&'),
            (LITERAL, 'bar'),
            (OR_OP, '||'),
            (LITERAL, 'a'),
            (AND_OP, '&&'),
            (LITERAL, 'b'),
            (OR_OP, '||'),
            (LITERAL, 'c'),
            (EOF, ''),
        ],
        list(tok))
def test(self):
    """'A || B && C' parses as ('&&', ('||', A, B), C): left-associative,
    with each leaf a Process carrying tokenized args and no redirects."""
    cmd = 'echo hoge$foo || echo piyo && cat'  # renamed from 'input'
    parser = pysh.Parser(pysh.Tokenizer(cmd))
    ast = parser.parse()
    # assertEquals is a deprecated alias removed in Python 3.12.
    # Top level: '&&' combining the '||' subtree with 'cat'.
    self.assertEqual(3, len(ast))
    self.assertEqual('&&', ast[0])
    self.assertEqual(3, len(ast[1]))
    self.assertEqual('||', ast[1][0])
    # Left leaf of '||': echo hoge$foo
    proc0 = ast[1][1]
    self.assertTrue(isinstance(proc0, pysh.Process))
    self.assertEqual(
        [[('literal', 'echo')],
         [('literal', 'hoge'), ('substitution', '$foo')]],
        proc0.args)
    self.assertFalse(proc0.redirects)
    # Right leaf of '||': echo piyo
    proc1 = ast[1][2]
    self.assertTrue(isinstance(proc1, pysh.Process))
    self.assertEqual([[('literal', 'echo')], [('literal', 'piyo')]],
                     proc1.args)
    self.assertFalse(proc1.redirects)
    # Right leaf of '&&': cat
    proc2 = ast[2]
    self.assertTrue(isinstance(proc2, pysh.Process))
    self.assertEqual([[('literal', 'cat')]], proc2.args)
    self.assertFalse(proc2.redirects)
def testBackQuoteWithSpace(self):
    """The tokenizer keeps spaces inside backquotes as SPACE tokens."""
    tok = pysh.Tokenizer('echo ` echo foo `')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (BACKQUOTE, '`'),
            (SPACE, ' '),
            (LITERAL, 'echo'),
            (SPACE, ' '),
            (LITERAL, 'foo'),
            (SPACE, ' '),
            (BACKQUOTE, '`'),
            (EOF, ''),
        ],
        list(tok))
def getAst(self, cmd):
    """Tokenize and parse *cmd*, returning the resulting AST."""
    return pysh.Parser(pysh.Tokenizer(cmd)).parse()
def testParenthesis(self):
    """'(' and ')' tokenize as PARENTHESIS_START/END, even adjacent to
    literals."""
    tok = pysh.Tokenizer('() a(b)')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (PARENTHESIS_START, '('),
            (PARENTHESIS_END, ')'),
            (LITERAL, 'a'),
            (PARENTHESIS_START, '('),
            (LITERAL, 'b'),
            (PARENTHESIS_END, ')'),
            (EOF, ''),
        ],
        list(tok))
def testSemicolonWithSpace(self):
    """Spaces around ';' are dropped; a trailing ';' precedes EOF directly."""
    tok = pysh.Tokenizer('a ; b ; ')
    self.assertEqual(  # assertEquals is deprecated (removed in 3.12)
        [
            (LITERAL, 'a'),
            (SEMICOLON, ';'),
            (LITERAL, 'b'),
            (SEMICOLON, ';'),
            (EOF, ''),
        ],
        list(tok))