class TestImageCache(unittest.TestCase):
    """Smoke tests for the ImageCache class."""

    def setUp(self):
        """Create a fresh ImageCache for each test."""
        self.ic = ImageCache()

    def tearDown(self):
        """No per-test cleanup is required."""
        pass

    def test_ic_construction(self):
        """A default-constructed cache is an ImageCache instance."""
        self.assertIsInstance(self.ic, ImageCache)

    def test_ic_get_table_name(self):
        """get_table() reports the backing table name as a string."""
        self.assertIsInstance(self.ic.get_table(), str)
async def gen_database(path: str, fast: bool) -> None:
    """Build an image database for a directory and write a summary report.

    Scans ``path`` with an ImageCache, aggregates statistics about the images
    found therein, pretty-prints the report, and saves it as a date-stamped
    JSON file in the current working directory.

    Args:
        path: Target directory whose images should be catalogued.
        fast: Passed through to ImageCache to select the fast hashing mode.
              NOTE(review): exact semantics defined by ImageCache — confirm.
    """
    ic = ImageCache(fast=fast)
    await ic.gen_cache_from_directory(path)

    report = {}
    # Each query yields a single scalar (first column of the first row).
    queries = {
        "image_types": "SELECT COUNT(DISTINCT img_type) FROM {};",
        "total_images": "SELECT COUNT(*) FROM {};",
        "average_size": "SELECT AVG(size) FROM {};",
        "total_size": "SELECT SUM(size) FROM {};",
    }
    for key, query in queries.items():
        rows = ic.query(query.format(ic.get_table()))
        report[key] = rows[0][0]

    # Get duplicate and ambiguous images
    report['duplicates'] = ic.get_duplicates()
    report['ambiguous'] = ic.get_ambiguous()
    report["process_time"] = ic.processing_time

    pp = pprint.PrettyPrinter(indent=2, compact=False)
    pp.pprint(report)

    logger.info("Completed database generation.")
    # Lazy %-style args avoid formatting cost when the level is disabled.
    logger.info("Processed %s images in %s seconds.",
                ic.get_count(), ic.processing_time)
    logger.info("Encountered %d duplicate images.", len(report['duplicates']))

    # Date-stamped report filename, e.g. gen_database_2024-01-31.json
    tstamp = datetime.datetime.now().strftime("gen_database_%Y-%m-%d.json")
    with open(tstamp, 'w', encoding='utf-8') as fout:
        json.dump(report, fout)
    logger.info("Report written to %s", tstamp)