def test_tokenize_slashslash_comment_then_newline2(self):
    """A // comment ends at the newline; the following token is emitted separately."""
    # The expected list contains a '\n' token, so the input must contain a
    # real newline between 'foo' and 'bar' (it was lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A // foo\nbar'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)
def test_tokenize_cstyle_comment(self):
    """A /* */ comment spanning newlines is split into per-line tokens."""
    # The expected list contains two '\n' tokens, so the input must span
    # three lines (the newlines were lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A /* foo\n*bar\n*/'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens)
def test_tokenize_0(self):
    """Tokenizing the empty string yields no tokens."""
    tokens = list(parse_deps._tokenize_js(''))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual([], tokens)
def test_tokenize_cstyle_comment(self):
    """A /* */ comment spanning newlines is split into per-line tokens.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    # Expected tokens include two '\n' entries, so the input literal must
    # contain two real newlines (lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A /* foo\n*bar\n*/'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens)
def test_tokenize_slashslash_comment_then_newline2(self):
    """A // comment is terminated by the newline; the next token follows it.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    # Expected tokens include '\n', so the input must contain a real
    # newline between 'foo' and 'bar' (lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A // foo\nbar'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)
def test_tokenize_slashslash_comment(self):
    """A trailing // comment with no newline yields text, marker, and body tokens."""
    tokens = list(parse_deps._tokenize_js('A // foo'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '//', ' foo'], tokens)
def test_tokenize_nl(self):
    """A lone newline is emitted as its own token."""
    tokens = list(parse_deps._tokenize_js('\n'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['\n'], tokens)
def test_tokenize_cstyle_comment(self):
    """A multi-line /* */ comment tokenizes to markers, bodies, and newlines.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    # The expected list has two '\n' tokens, so the input needs two real
    # newlines (they were lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A /* foo\n*bar\n*/'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens)
def test_tokenize_slashslash_comment(self):
    """A trailing // comment with no newline yields text, marker, and body tokens.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    tokens = list(parse_deps._tokenize_js('A // foo'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '//', ' foo'], tokens)
def test_tokenize_nl(self):
    """A lone newline is emitted as its own token.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    tokens = list(parse_deps._tokenize_js('\n'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['\n'], tokens)
def test_tokenize_slashslash_comment_then_newline2(self):
    """A // comment ends at the newline; the following token is emitted separately.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    # Expected tokens include '\n', so the input must carry a real newline
    # between 'foo' and 'bar' (lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A // foo\nbar'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)
def test_tokenize_slashslash_comment_then_newline2(self):
    """A // comment ends at the newline; the following token is emitted separately.

    NOTE(review): this method name is duplicated elsewhere in the file;
    under unittest only the last definition runs — consider deduplicating.
    """
    # Expected tokens include '\n', so the input must carry a real newline
    # between 'foo' and 'bar' (lost in a reformat).
    tokens = list(parse_deps._tokenize_js('A // foo\nbar'))
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)