Example #1
import os
import unittest
from tempfile import mkdtemp

# Tokenizer, Parser, Coverage and the copyright fixtures come from the module
# under test; those imports are omitted in this excerpt.
class CoverageTest(unittest.TestCase):
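    # Build a temporary source tree containing a debian/ subdirectory; the
    # debian/copyright file is written there by get_paragraphs() below.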
    def setUp(self):
        self.tokenizer = Tokenizer()
        self.parser = Parser()
        self.test_dir = mkdtemp()
        self.debian_dir = os.path.join(self.test_dir, "debian")
        self.copyright_file_path = os.path.join(self.debian_dir, "copyright")
        os.makedirs(self.debian_dir)

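    # Create an empty file with the given name, defaulting to the root of the
    # temporary source tree.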
    def fake_file(self, filename, dir=None):
        if dir is None:
            dir = self.test_dir
        with open(os.path.join(dir, filename), 'w'):
            pass

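    # Write the fixture text to debian/copyright, then tokenize it, guess field
    # types and run the parser over the resulting paragraphs.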
    def get_paragraphs(self, txt):
        with open(self.copyright_file_path, 'w') as copyright_file:
            copyright_file.write("".join(txt))
        paragraphs = self.tokenizer.get_paragraphs(txt)
        self.parser._guess_types(paragraphs)
        self.parser.process(paragraphs)
        return paragraphs

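    # two_fp_without_header is a copyright-file fixture defined elsewhere in the
    # test module (not shown in this excerpt).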
    def test_test(self):
        self.fake_file("foobar.foo")
        self.fake_file("sara.sa", self.debian_dir)
        paragraphs = self.get_paragraphs(two_fp_without_header)
        coverage = Coverage(paragraphs, self.test_dir)
Example #2
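    # A single "Files:" line yields one paragraph containing one field; the field
    # value keeps its leading space and, as the last input line, has no newline.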
    def test_one_paragraph_one_token(self):
        tokenizer = Tokenizer()
        data = "Files: foobar.foo"
        tokenized_data = tokenizer.get_paragraphs(data.splitlines(True))
        self.assertEqual(len(tokenized_data), 1)
        first_paragraph = tokenized_data[0]
        self.assertEqual(len(first_paragraph), 1)
        first_field = list(first_paragraph)[0]
        self.assertEqual(first_field.line_number, 1)
        self.assertEqual(first_field.name, "Files")
        self.assertEqual(list(first_field)[0], " foobar.foo")
Example #3
import unittest

# Tokenizer and Parser come from the module under test; those imports are
# omitted in this excerpt.
class RuleTest(unittest.TestCase):
    def setUp(self):
        self.tokenizer = Tokenizer()
        self.parser = Parser()

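    # Tokenize the input and guess field types without running the full parser.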
    def get_paragraphs(self, txt):
        paragraphs = self.tokenizer.get_paragraphs(txt)
        self.parser._guess_types(paragraphs)
        return paragraphs

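    # Print each message collected by a rule through the parser's message printer.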
    def print_messages(self, rule):
        for message in rule.messages:
            self.parser._print_message(message)
Example #4
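    # A blank line in the input starts a second paragraph; only the first
    # paragraph's single "Files" field is inspected here.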
    def test_two_paragraph_one_token(self):
        tokenizer = Tokenizer()
        data = """Files: foobar.foo

Copyright: Foo Bar <*****@*****.**>"""
        tokenized_data = tokenizer.get_paragraphs(data.splitlines(True))
        self.assertEqual(len(tokenized_data), 2)
        first_paragraph = tokenized_data[0]
        self.assertEqual(len(first_paragraph), 1)
        first_field = list(first_paragraph)[0]
        self.assertEqual(first_field.line_number, 1)
        self.assertEqual(first_field.name, "Files")
        self.assertEqual(list(first_field)[0], " foobar.foo\n")
Example #5
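    # Blank lines before, between and after the stanzas separate paragraphs but do
    # not create empty ones; field line numbers still count those blank lines.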
    def test_two_paragraph_three_tokens(self):
        tokenizer = Tokenizer()
        data = """

Files: foobar.foo
Copyright: Foo Bar <*****@*****.**>
License: Beerware


Files: sara.sa
Copyright: Sara Sa <*****@*****.**>
License: Vaporware

"""
        tokenized_data = tokenizer.get_paragraphs(data.splitlines(True))
        self.assertEqual(len(tokenized_data), 2)
        first_paragraph = tokenized_data[0]
        self.assertEqual(len(first_paragraph), 3)
        first_field = list(first_paragraph)[0]
        self.assertEqual(first_field.line_number, 3)
        self.assertEqual(first_field.name, "Files")
        self.assertEqual(list(first_field)[0], " foobar.foo\n")
        second_field = list(first_paragraph)[1]
        self.assertEqual(second_field.line_number, 4)
        self.assertEqual(second_field.name, "Copyright")
        self.assertEqual(list(second_field)[0], " Foo Bar <*****@*****.**>\n")
        third_field = list(first_paragraph)[2]
        self.assertEqual(third_field.line_number, 5)
        self.assertEqual(third_field.name, "License")
        self.assertEqual(list(third_field)[0], " Beerware\n")

        second_paragraph = tokenized_data[1]
        self.assertEqual(len(second_paragraph), 3)
        first_field = list(second_paragraph)[0]
        self.assertEqual(first_field.line_number, 8)
        self.assertEqual(first_field.name, "Files")
        self.assertEqual(list(first_field)[0], " sara.sa\n")
        second_field = list(second_paragraph)[1]
        self.assertEqual(second_field.line_number, 9)
        self.assertEqual(second_field.name, "Copyright")
        self.assertEqual(list(second_field)[0], " Sara Sa <*****@*****.**>\n")
        third_field = list(second_paragraph)[2]
        self.assertEqual(third_field.line_number, 10)
        self.assertEqual(third_field.name, "License")
        self.assertEqual(list(third_field)[0], " Vaporware\n")
Example #6
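    # Parser.process() returns a falsy value when the input contains an invalid
    # field; two_fp_with_invalid_field is a fixture defined elsewhere.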
    def test_default_init(self):
        tokenizer = Tokenizer()
        paragraphs = tokenizer.get_paragraphs(two_fp_with_invalid_field)
        parser = Parser()
        self.assertFalse(parser.process(paragraphs))