# Example 1
    def test_unique_list_with_no_dups(self):
        """A duplicate-free list reduces to itself with zero duplicate counts."""
        results = UrlArgProcessing.reduce_url_list(
            url_list=self.UNIQUE_URL_LIST)

        reduced = results[UrlArgProcessing.REDUCED_LIST]
        total_dups = results[UrlArgProcessing.TOTAL_DUP_LIST]
        unique_dups = results[UrlArgProcessing.UNIQUE_DUP_LIST]

        # Reduced list keeps every element of the input
        assert_equals(len(reduced), len(self.UNIQUE_URL_LIST))

        # No duplicates were detected, totally or uniquely
        assert_equals(len(total_dups), 0)
        assert_equals(len(unique_dups), 0)
# Example 2
    def test_unique_list_with_single_dups(self):
        """Doubling the unique list yields exactly one duplicate per entry."""
        # Every unique URL appears exactly twice in the input
        doubled_list = self.UNIQUE_URL_LIST * 2
        num_unique = len(self.UNIQUE_URL_LIST)

        outcome = UrlArgProcessing.reduce_url_list(url_list=doubled_list)

        # Reduction restores the original unique count
        assert_equals(len(outcome[UrlArgProcessing.REDUCED_LIST]),
                      num_unique)

        # Each unique entry contributed one duplicate occurrence
        assert_equals(len(outcome[UrlArgProcessing.TOTAL_DUP_LIST]),
                      num_unique)

        # Every duplicated value is distinct
        assert_equals(len(outcome[UrlArgProcessing.UNIQUE_DUP_LIST]),
                      num_unique)