Example #1
    def test_tokenize(self):
        def testtok(list_):
            s = rQ.joinTokens(list_, delimiter=',')
            a = rQ.tokenize(s, delimiter=',')
            self.assertEqual(list_, a)

        testtok(["a", "b", "c,,,,,,,,,,,,b", "set-0:c=b"])
        testtok(["a"])
        testtok([])

        self.assertEqual(2, len(rQ.tokenize("set-0:a=b", delimiter=":")))
        self.assertEqual(
            2,
            len(rQ.tokenize(rQ.tokenize("set-0:a=b", ':', '\\')[1], '=',
                            '\\')))
        self.assertEqual(
            3, len(rQ.tokenize("this \\, is, a,test", delimiter=",")))
        self.assertEqual(4, len(rQ.tokenize("this , is, a,test",
                                            delimiter=",")))
        self.assertEqual("this,is", rQ.joinTokens(["this", "is"]))
        self.assertEqual("t\\,his,is", rQ.joinTokens(["t,his", "is"]))
Example #2
    def test_tokenize7(self):
        r = rQ.tokenize(" Hello $ World $ this\\$  is a $ test   ",
                        keepEmpty=False,
                        strip=True,
                        delimiter="$")
        self.assertEqual(["Hello", "World", "this$  is a", "test"], r)
Example #3
    def test_tokenize6(self):
        r = rQ.tokenize(" Hello World this\\ is a test   ",
                        delimiter=" ",
                        keepEmpty=True)
        self.assertEqual(
            ["", "Hello", "World", "this is", "a", "test", "", "", ""], r)
Example #4
    def test_tokenize2(self):
        r = rQ.tokenize("Hello World", delimiter=" ")
        self.assertEqual(["Hello", "World"], r)
Example #5
    def test_tokenize1(self):
        r = rQ.tokenize("Hello")
        self.assertEqual(["Hello"], r)
Example #6
    def testtok(list_):
        s = rQ.joinTokens(list_, delimiter=',')
        a = rQ.tokenize(s, delimiter=',')
        self.assertEqual(list_, a)
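
For context, here is a minimal sketch of a tokenize/joinTokens pair that is consistent with the tests above. It is not the actual rQ implementation; the parameter names (escapeChar, keepEmpty, strip) appear in the calls above, but the parameter order and the default values chosen here are assumptions inferred from those calls.

# Minimal sketch (not the actual rQ code): a tokenize/joinTokens pair
# consistent with the tests above. Defaults are assumptions.
def tokenize(s, delimiter=",", escapeChar="\\", keepEmpty=False, strip=False):
    """Split s on delimiter; escapeChar before a delimiter keeps it literal."""
    tokens, current, i = [], "", 0
    while i < len(s):
        c = s[i]
        if c == escapeChar and i + 1 < len(s) and s[i + 1] == delimiter:
            current += delimiter  # escaped delimiter stays inside the token
            i += 2
            continue
        if c == delimiter:
            tokens.append(current)
            current = ""
        else:
            current += c
        i += 1
    if s:  # an empty input yields no tokens, so joinTokens([]) round-trips
        tokens.append(current)
    if strip:
        tokens = [t.strip() for t in tokens]
    if not keepEmpty:
        tokens = [t for t in tokens if t]
    return tokens


def joinTokens(tokens, delimiter=",", escapeChar="\\"):
    """Inverse of tokenize: escape delimiters inside each token, then join."""
    return delimiter.join(t.replace(delimiter, escapeChar + delimiter)
                          for t in tokens)

Note that the tests call rQ.tokenize both with keyword arguments and with the escape character passed as the third positional argument (rQ.tokenize("set-0:a=b", ':', '\\')), which is why escapeChar is placed directly after delimiter in this sketch.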