def test_tokenize_slashslash_comment(self):
    """A // comment tokenizes as: preceding code, '//' marker, comment text."""
    # NOTE(review): a second test method with this exact name appears later in
    # the file and shadows this one — only the later definition actually runs.
    tokens = list(strip_js_comments._TokenizeJS("A // foo"))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(["A ", "//", " foo"], tokens)
def test_tokenize_slashslash_comment_then_newline(self):
    """A newline terminates a // comment and is emitted as its own token."""
    # NOTE(review): shadowed by a later duplicate definition of the same name —
    # only the later one runs under unittest.
    tokens = list(strip_js_comments._TokenizeJS("A // foo\nbar"))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(["A ", "//", " foo", "\n", "bar"], tokens)
def test_tokenize_empty(self):
    """The empty string yields no tokens at all."""
    # NOTE(review): shadowed by a later duplicate definition of the same name —
    # only the later one runs under unittest.
    tokens = list(strip_js_comments._TokenizeJS(""))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual([], tokens)
def test_tokenize_nl(self):
    """A lone newline is emitted as a single newline token."""
    # NOTE(review): shadowed by a later duplicate definition of the same name —
    # only the later one runs under unittest.
    tokens = list(strip_js_comments._TokenizeJS("\n"))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(["\n"], tokens)
def test_tokenize_cstyle_comment_one_line(self):
    """A single-line /* */ comment yields code, '/*', body, and '*/' tokens."""
    # NOTE(review): shadowed by a later duplicate definition of the same name —
    # only the later one runs under unittest.
    tokens = list(strip_js_comments._TokenizeJS('A /* foo */'))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(['A ', '/*', ' foo ', '*/'], tokens)
def test_tokenize_cstyle_comment_multi_line(self):
    """Newlines inside a /* */ comment are emitted as separate tokens."""
    # NOTE(review): shadowed by a later duplicate definition of the same name —
    # only the later one runs under unittest.
    tokens = list(strip_js_comments._TokenizeJS('A /* foo\n*bar\n*/'))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens)
def test_tokenize_slashslash_comment(self):
    """A // comment tokenizes as: preceding code, '//' marker, comment text."""
    # NOTE(review): duplicates an earlier test method of the same name; this
    # later definition is the one unittest actually runs — the pair should be
    # deduplicated.
    tokens = list(strip_js_comments._TokenizeJS('A // foo'))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(['A ', '//', ' foo'], tokens)
def test_tokenize_empty(self):
    """The empty string yields no tokens at all."""
    # NOTE(review): duplicates an earlier test method of the same name; this
    # later definition is the one unittest actually runs.
    tokens = list(strip_js_comments._TokenizeJS(''))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual([], tokens)
def test_tokenize_nl(self):
    """A lone newline is emitted as a single newline token."""
    # NOTE(review): duplicates an earlier test method of the same name; this
    # later definition is the one unittest actually runs.
    tokens = list(strip_js_comments._TokenizeJS('\n'))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(['\n'], tokens)
def test_tokenize_slashslash_comment_then_newline(self):
    """A newline terminates a // comment and is emitted as its own token."""
    # NOTE(review): duplicates an earlier test method of the same name; this
    # later definition is the one unittest actually runs.
    tokens = list(strip_js_comments._TokenizeJS('A // foo\nbar'))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)
def test_tokenize_cstyle_comment_one_line(self):
    """A single-line /* */ comment yields code, '/*', body, and '*/' tokens."""
    # NOTE(review): duplicates an earlier test method of the same name; this
    # later definition is the one unittest actually runs.
    tokens = list(strip_js_comments._TokenizeJS("A /* foo */"))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(["A ", "/*", " foo ", "*/"], tokens)
def test_tokenize_cstyle_comment_multi_line(self):
    """Newlines inside a /* */ comment are emitted as separate tokens."""
    # NOTE(review): duplicates an earlier test method of the same name; this
    # later definition is the one unittest actually runs.
    tokens = list(strip_js_comments._TokenizeJS("A /* foo\n*bar\n*/"))
    # assertEquals is a deprecated alias removed in Python 3.12; use assertEqual.
    self.assertEqual(["A ", "/*", " foo", "\n", "*bar", "\n", "*/"], tokens)