class TestScrapperGraph(unittest.TestCase):
    """Integration tests for the per-scrapper synonym graphs.

    NOTE(review): ``word``, ``lang`` and ``depth`` are read as class
    attributes but are not defined in this class — presumably supplied
    by a subclass or the test loader. Confirm before running standalone.
    """

    # Scratch files created in setUp and deleted in tearDown.
    out_text_file = "_.txt"
    out_ttl_file = "_.ttl"
    out_ttl_file_2 = "_2.ttl"

    @classmethod
    def setUpClass(cls):
        """Scrape once for the whole class: one graph per scrapper."""
        cls.res = get_synonyms_from_scrappers(
            cls.word, cls.lang, cls.depth, merge_graph=False
        )
        cls.scrapper_names = [scr.website for scr in scrappers[cls.lang]]

    def setUp(self):
        """Merge the per-scrapper graphs, collect the word set, create scratch files."""
        self.merged_graph = Graph()
        for graph in self.res:
            self.merged_graph += graph
        with open(self.out_ttl_file, "w") as f:
            print(self.merged_graph, file=f)

        # Every word found by any scrapper, deduplicated.
        self.words = set()
        for graph in self.res:
            self.words.update(graph.to_list())

        touch(self.out_text_file)
        touch(self.out_ttl_file)
        touch(self.out_ttl_file_2)

    def tearDown(self):
        """Remove the scratch files created in setUp."""
        os.remove(self.out_text_file)
        os.remove(self.out_ttl_file)
        os.remove(self.out_ttl_file_2)

    def test_same_output(self):
        """Every scraped word must be present in the merged graph."""
        for word in self.words:
            self.assertIn(word, self.merged_graph)

    def test_not_loosing_word_in_output(self):
        """Round-trip: the text dump holds exactly the scraped words, once each."""
        self.merged_graph.to_text_file(self.out_text_file)
        words_in_file = []
        with open(self.out_text_file) as f:
            for line in f:
                word = line.strip()  # strip once; original recomputed it per assertion
                if not word:
                    continue
                self.assertIn(word, self.merged_graph, f"word is: {word}")
                self.assertIn(word, self.words)
                words_in_file.append(word)
        # Same cardinality as the scraped set, and no duplicate lines.
        self.assertEqual(len(words_in_file), len(self.words))
        self.assertEqual(len(words_in_file), len(set(words_in_file)))

    def test_delete_several_depth(self):
        """Pruning multi-depth triples must not lose any word."""
        self.merged_graph.delete_several_depth()
        self.test_not_loosing_word_in_output()
        self.test_same_output()
        # TODO: query the graph to assert no triple with several depths remains
        # (the original left an unused placeholder query string here).
        with open(self.out_ttl_file_2, "w") as f:
            print(self.merged_graph, file=f)

    def test_all_scrappers_return_results(self):
        """Each scrapper must produce a non-empty graph that contains synonyms."""
        for graph, scrapper_name in zip(self.res, self.scrapper_names):
            msg = (
                f"Error with scrapper of '{scrapper_name}'. "
                f"lang is '{self.lang}'. word is '{self.word}'"
            )
            with self.subTest(scrapper=scrapper_name):
                self.assertFalse(graph.is_empty(), msg=msg)
                self.assertTrue(graph.contains_synonyms(), msg=msg)

    def test_no_W_in_words(self):
        """Words must be clean: no parentheses, HTML artifacts, or outer whitespace."""
        for word in self.merged_graph.to_list():
            with self.subTest(msg=f"test failed with word '{word}'"):
                # No parentheses.
                self.assertFalse(re.search(r"[()]", word))
                # HTML entities already unescaped.
                self.assertEqual(html.unescape(word), word)
                # No HTML tag characters.
                self.assertFalse(re.search(r"[<>]", word))
                # No leading/trailing whitespace.
                self.assertEqual(word.strip(), word)