def test_cache_save_existing(self):
    """Saving a page for an already-cached domain adds the file to that domain.

    The example.com domain directory already exists in the cache fixture;
    saving a new page must create a ``<hash>.gz`` file inside it and record
    the entry in ``c.sites``.
    """
    c = GzipCache(COMPRESS_CACHE_DIRECTORY)
    html_string = "<html></html>".encode("utf-8")
    example_url = "http://www.example.com/testpage.html"
    d = dir_domain(example_url)
    f = clean_url_hash(example_url)
    full_save_name = os.path.join(COMPRESS_CACHE_DIRECTORY, d, f)
    zip_name = "{}.gz".format(full_save_name)
    # Register cleanup BEFORE the save so the .gz artifact is removed even
    # when an assertion below fails; otherwise a stale file would make
    # subsequent test runs start from a dirty cache.
    self.addCleanup(
        lambda: os.path.exists(zip_name) and os.remove(zip_name)
    )

    # The page must not be cached yet.
    self.assertNotIn(full_save_name, c.sites[d])
    c.save(example_url, html_string)
    # After saving, the gzip file exists on disk and is tracked in memory.
    self.assertTrue(os.path.exists(zip_name))
    self.assertIn(full_save_name, c.sites[d])
def test_cache_save_new(self):
    """Saving a page for an unseen domain creates that domain's directory.

    The sample.com domain is not in the cache fixture; saving a page must
    create the per-domain directory, write the ``<hash>.gz`` file into it,
    and record the entry in ``c.sites``.
    """
    c = GzipCache(COMPRESS_CACHE_DIRECTORY)
    html_string = "<html></html>".encode("utf-8")
    sample_url = "http://www.sample.com/testpage.html"
    d = dir_domain(sample_url)
    f = clean_url_hash(sample_url)
    DIRECTORY = os.path.join(COMPRESS_CACHE_DIRECTORY, d)
    # Register cleanup BEFORE the save so the directory is removed even
    # when an assertion below fails; a leftover directory would break the
    # assertFalse(os.path.exists(...)) precondition on the next run.
    self.addCleanup(shutil.rmtree, DIRECTORY, ignore_errors=True)

    # The www_sample_com directory must not exist until the file is cached.
    self.assertFalse(os.path.exists(DIRECTORY))
    self.assertNotIn(d, c.sites)
    c.save(sample_url, html_string)
    full_save_name = os.path.join(DIRECTORY, f)
    zip_name = "{}.gz".format(full_save_name)
    # After saving, the entry is tracked in memory and the gzip file exists.
    self.assertIn(full_save_name, c.sites[d])
    self.assertTrue(os.path.exists(zip_name))