def test_create_tile(self):
    """Creating two level-1 tiles stores both and issues one WMS request per tile."""
    self.tile_mgr.creator().create_tiles(
        [Tile((0, 0, 1)), Tile((1, 0, 1))])
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert self.file_cache.stored_tiles == set([(0, 0, 1), (1, 0, 1)])
    # one 256x256 request per hemisphere
    assert sorted(self.source.requested) == \
        [((-180.0, -90.0, 0.0, 90.0), (256, 256), SRS(4326)),
         ((0.0, -90.0, 180.0, 90.0), (256, 256), SRS(4326))]
def test_remove(self):
    """A cached tile is no longer reported as cached after remove_tile."""
    coord = (1, 0, 4)
    self.create_cached_tile(self.create_tile(coord))
    assert self.cache.is_cached(Tile(coord))
    self.cache.remove_tile(Tile(coord))
    assert not self.cache.is_cached(Tile(coord))
def test_bundle_header(self):
    """Check that the bundle file header is updated on every store_tile.

    Header layout: '<4I3Q6I' (64 bytes, little endian). Checked fields:
    version, tiles-per-bundle, max tile size, and total file size.
    """
    t = Tile((5000, 1000, 12), ImageSource(BytesIO(b'a' * 4000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle'))

    def assert_header(tile_bytes_written, max_tile_bytes):
        # each stored tile is prefixed with a 4 byte length -> the +4 per
        # tile in tile_bytes_written
        with open(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle'), 'r+b') as f:
            header = struct.unpack('<4I3Q6I', f.read(64))
        assert header[0] == 3  # version
        assert header[1] == 128*128  # tiles per bundle
        assert header[2] == max_tile_bytes
        # total file size: fixed header + tile index + all tile data
        assert header[5] == 64 + 128*128*8 + sum(tile_bytes_written)

    assert_header([4000 + 4], 4000)

    t = Tile((5000, 1001, 12), ImageSource(BytesIO(b'a' * 6000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    assert_header([4000 + 4, 6000 + 4], 6000)

    t = Tile((4992, 999, 12), ImageSource(BytesIO(b'a' * 1000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    assert_header([4000 + 4, 6000 + 4, 1000 + 4], 6000)

    t = Tile((5000, 1001, 12), ImageSource(BytesIO(b'a' * 3000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    # still contains bytes from overwritten tile
    assert_header([4000 + 4, 6000 + 4 + 3000 + 4, 1000 + 4], 6000)
def test_create_tile_multiple_fragmented(self, tile_mgr, mock_file_cache, mock_wms_client):
    """Tiles spread over one meta tile are created with a single WMS request."""
    tile_mgr.creator().create_tiles([Tile((4, 0, 3)), Tile((5, 2, 3))])
    expected = {(4, 0, 3), (4, 1, 3), (4, 2, 3), (5, 0, 3), (5, 1, 3), (5, 2, 3)}
    assert mock_file_cache.stored_tiles == expected
    assert sorted(mock_wms_client.requested) == [
        ((-1.7578125, -90, 91.7578125, 46.7578125), (532, 778), SRS(4326)),
    ]
def test_bulk_get_multiple_meta_tiles(self):
    """Requesting two meta tiles returns and stores all eight subtiles."""
    tiles = self.tile_mgr.creator().create_tiles([Tile((1, 0, 2)), Tile((2, 0, 2))])
    assert len(tiles) == 2*2*2
    # BUG FIX: the original `assert x, set([...])` passed the set as the
    # assertion *message* and only checked truthiness of stored_tiles;
    # compare the stored tiles for equality instead.
    assert self.file_cache.stored_tiles == set([
        (0, 0, 2), (1, 0, 2), (0, 1, 2), (1, 1, 2),
        (2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2),
    ])
def test_load_stored_tile(self):
    """Store a tile, then reload it with and without metadata."""
    coord = (5, 12, 4)
    original = self.create_tile(coord)
    self.cache.store_tile(original)
    orig_size = original.size

    # check stored tile
    reloaded = Tile(coord)
    assert reloaded.source is None
    assert self.cache.load_tile(reloaded)

    if not self.always_loads_metadata:
        assert reloaded.source is not None
        assert reloaded.timestamp is None
        assert reloaded.size is None
    assert len(reloaded.source.as_buffer().read()) == orig_size

    # check loading of metadata (timestamp, size)
    reloaded = Tile(coord)
    assert reloaded.source is None
    assert self.cache.load_tile(reloaded, with_metadata=True)
    assert reloaded.source is not None
    if reloaded.timestamp:
        now = time.time()
        if self.uses_utc:
            now = calendar.timegm(datetime.datetime.utcnow().timetuple())
        assert abs(reloaded.timestamp - now) <= 10
    if reloaded.size:
        assert reloaded.size == orig_size
def test_cleanup_sqlite(self):
    """Cleanup removes the level-3 tiles but keeps the 3.mbtile file on disk."""
    seed_conf = load_seed_tasks_conf(self.seed_conf_file, self.mapproxy_conf)
    cleanup_tasks = seed_conf.cleanups(['sqlite_cache'])
    cache = cleanup_tasks[0].tile_manager.cache
    for level in (2, 3):
        cache.store_tile(self.create_tile((0, 0, level)))
        assert cache.is_cached(Tile((0, 0, level)))
    cache_dir = os.path.join(self.dir, 'cache', 'sqlite_cache', 'GLOBAL_GEODETIC')
    assert_files_in_dir(cache_dir, ['2.mbtile', '3.mbtile'], glob='*.mbtile')

    cleanup(cleanup_tasks, verbose=False, dry_run=False)

    # 3.mbtile file is still there
    assert_files_in_dir(cache_dir, ['2.mbtile', '3.mbtile'], glob='*.mbtile')
    assert cache.is_cached(Tile((0, 0, 2)))
    assert not cache.is_cached(Tile((0, 0, 3)))
def test_cleanup_sqlite_remove_all(self):
    """A remove_all cleanup deletes the whole per-level file from disk."""
    seed_conf = load_seed_tasks_conf(self.seed_conf_file, self.mapproxy_conf)
    cleanup_tasks = seed_conf.cleanups(['sqlite_cache_remove_all'])
    cache = cleanup_tasks[0].tile_manager.cache
    cache.store_tile(self.create_tile((0, 0, 2)))
    cache.store_tile(self.create_tile((0, 0, 3)))
    assert cache.is_cached(Tile((0, 0, 2)))
    assert cache.is_cached(Tile((0, 0, 3)))
    cache_dir = os.path.join(self.dir, 'cache', 'sqlite_cache', 'GLOBAL_GEODETIC')
    assert sorted(os.listdir(cache_dir)) == ['2.mbtile', '3.mbtile']

    cleanup(cleanup_tasks, verbose=False, dry_run=False)

    # 2.mbtile is removed completely; only 3.mbtile remains
    # (the original comment wrongly named 3.mbtile as the removed file)
    assert sorted(os.listdir(cache_dir)) == ['3.mbtile']
    assert not cache.is_cached(Tile((0, 0, 2)))
    assert cache.is_cached(Tile((0, 0, 3)))
def test_load_stored_tile(self):
    """Store a tile, then reload it with and without metadata."""
    coord = (5, 12, 4)
    original = self.create_tile(coord)
    self.cache.store_tile(original)
    orig_size = original.size

    # check stored tile
    reloaded = Tile(coord)
    assert reloaded.source is None
    assert self.cache.load_tile(reloaded)

    if not self.always_loads_metadata:
        assert reloaded.source is not None
        assert reloaded.timestamp is None
        assert reloaded.size is None
    assert len(reloaded.source.as_buffer().read()) == orig_size

    # check loading of metadata (timestamp, size)
    reloaded = Tile(coord)
    assert reloaded.source is None
    assert self.cache.load_tile(reloaded, with_metadata=True)
    assert reloaded.source is not None
    if reloaded.timestamp:
        assert timestamp_is_now(reloaded.timestamp, delta=10)
    if reloaded.size:
        assert reloaded.size == orig_size
def test_bundle_header(self):
    """Check that the bundle and bundlx headers are updated on each store.

    Header layout: 15 little-endian int32 ('<lllllllllllllll', 60 bytes).
    Checked fields: bundle extent, file size, max tile size, index size.
    """
    t = Tile((5000, 1000, 12), ImageSource(BytesIO(b'a' * 4000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle'))
    assert os.path.exists(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundlx'))

    def assert_header(tile_bytes_written, max_tile_bytes):
        # each stored tile is prefixed with a 4 byte length -> the +4 per
        # tile in tile_bytes_written
        with open(os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle'), 'r+b') as f:
            header = struct.unpack('<lllllllllllllll', f.read(60))
        # fields 11-14: presumably the bundle's row/column extent —
        # values match R0380 (0x380 == 896) / C1380 (0x1380 == 4992)
        assert header[11] == 896
        assert header[12] == 1023
        assert header[13] == 4992
        assert header[14] == 5119
        # total file size: fixed header + offset table + all tile data
        assert header[6] == 60 + 128*128*4 + sum(tile_bytes_written)
        assert header[2] == max_tile_bytes
        assert header[4] == len(tile_bytes_written)*4

    assert_header([4000 + 4], 4000)

    t = Tile((5000, 1001, 12), ImageSource(BytesIO(b'a' * 6000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    assert_header([4000 + 4, 6000 + 4], 6000)

    t = Tile((4992, 999, 12), ImageSource(BytesIO(b'a' * 1000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    assert_header([4000 + 4, 6000 + 4, 1000 + 4], 6000)

    t = Tile((5000, 1001, 12), ImageSource(BytesIO(b'a' * 3000),
             image_opts=ImageOptions(format='image/png')))
    self.cache.store_tile(t)
    # still contains bytes from overwritten tile
    assert_header([4000 + 4, 6000 + 4 + 3000 + 4, 1000 + 4], 6000)
def test_defragmentation_min_percent(self):
    """Defragmentation only rewrites a bundle when waste exceeds min_percent."""
    cache = self.cache_class(self.cache_dir)
    # one tile in a second bundle (level 13) that never has wasted space
    t = Tile((10000, 2000, 13), ImageSource(
        BytesIO(b'a' * 120 * 1024),
        image_opts=ImageOptions(format='image/png')))
    cache.store_tile(t)
    # 100 tiles in the L12 bundle; the first 10 are written twice, so
    # ~9% of the bundle is wasted by overwritten tile data
    for x in range(100):
        for _ in range(2 if x < 10 else 1):
            t = Tile((5000+x, 1000, 12), ImageSource(
                BytesIO(b'a' * 120 * 1024),
                image_opts=ImageOptions(format='image/png')))
            cache.store_tile(t)

    logger = mockProgressLog()
    fname = os.path.join(self.cache_dir, 'L12', 'R0380C1380.bundle')
    before = os.path.getsize(fname)
    # default threshold: ~9% waste is not enough, no bundle is rewritten
    defrag_compact_cache(cache, log_progress=logger)
    assert len(logger.logs) == 2
    assert logger.logs[0]['defrag'] == False
    assert logger.logs[1]['defrag'] == False
    after = os.path.getsize(fname)
    assert before == after

    logger = mockProgressLog()
    before = os.path.getsize(fname)
    # with min_percent lowered to 8% only the fragmented L12 bundle shrinks
    defrag_compact_cache(cache, min_percent=0.08, log_progress=logger)
    assert len(logger.logs) == 2
    assert logger.logs[0]['defrag'] == True
    assert logger.logs[1]['defrag'] == False
    after = os.path.getsize(fname)
    assert after < before
def test_load_more_than_2000_tiles(self):
    """Storing and bulk-loading 2010 tiles in one load_tiles call works."""
    coords = [(i, 0, 10) for i in range(2010)]
    # prepare data
    for coord in coords:
        assert self.cache.store_tile(Tile(coord, ImageSource(BytesIO(b'foo'))))
    assert self.cache.load_tiles([Tile(coord) for coord in coords])
def test_load_tiles_mixed(self):
    """load_tiles returns falsy when at least one tile is missing."""
    tile = self.create_tile((1, 0, 4))
    self.create_cached_tile(tile)
    tiles = [Tile(None), Tile((0, 0, 4)), Tile((1, 0, 4))]
    # idiom fix: `== False` replaced with `not ...`
    assert not self.cache.load_tiles(tiles)
    assert not tiles[0].is_missing()  # Tile(None) is not reported missing
    assert tiles[1].is_missing()      # (0, 0, 4) was never stored
    assert not tiles[2].is_missing()  # (1, 0, 4) is cached
def test_create_tiles(self):
    """Two meta tiles -> all eight subtiles stored, one request per meta tile."""
    self.tile_mgr.creator().create_tiles([Tile((0, 0, 2)), Tile((2, 0, 2))])
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert self.file_cache.stored_tiles == \
        set([(0, 0, 2), (1, 0, 2), (0, 1, 2), (1, 1, 2),
             (2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2)])
    assert sorted(self.client.requested) == \
        [((-180.0, -90.0, 0.0, 90.0), (512, 512), SRS(4326)),
         ((0.0, -90.0, 180.0, 90.0), (512, 512), SRS(4326))]
def test_load_tiles_cached(self):
    """load_tiles succeeds when every requested tile is cached."""
    for coord in [(0, 0, 1), (0, 1, 1)]:
        self.cache.store_tile(self.create_tile(coord))
    tiles = [Tile((0, 0, 1)), Tile((0, 1, 1))]
    assert self.cache.load_tiles(tiles)
    assert not tiles[0].is_missing()
    assert not tiles[1].is_missing()
def test_create_tiles(self, tile_mgr, mock_file_cache, mock_wms_client):
    """Two meta tiles -> all eight subtiles stored, one request per meta tile."""
    tile_mgr.creator().create_tiles([Tile((0, 0, 2)), Tile((2, 0, 2))])
    expected = {
        (0, 0, 2), (1, 0, 2), (0, 1, 2), (1, 1, 2),
        (2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2),
    }
    assert mock_file_cache.stored_tiles == expected
    assert sorted(mock_wms_client.requested) == [
        ((-180.0, -90.0, 0.0, 90.0), (512, 512), SRS(4326)),
        ((0.0, -90.0, 180.0, 90.0), (512, 512), SRS(4326)),
    ]
def _create_meta_tile(self, meta_tile):
    """Create all tiles of ``meta_tile`` and return them loaded from cache.

    Takes the tile lock for the meta tile's main tile, triggers rendering
    only when not all tiles are cached yet, then loads the tiles.
    """
    main_tile = Tile(meta_tile.main_tile_coord)
    with self.tile_locker(main_tile):
        # re-check under the lock: the tiles may have been created by
        # whoever held the lock while we waited
        if not all(self.is_cached(t) for t in meta_tile.tiles if t is not None):
            self._create_renderd_tile(main_tile.coord)
        tiles = [Tile(coord) for coord in meta_tile.tiles]
        self.cache.load_tiles(tiles)
    return tiles
def test_create_tile_multiple_fragmented(self):
    """Tiles spread over one meta tile are created with a single request."""
    self.tile_mgr.creator().create_tiles(
        [Tile((4, 0, 3)), Tile((5, 2, 3))])
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert self.file_cache.stored_tiles == \
        set([(4, 0, 3), (4, 1, 3), (4, 2, 3), (5, 0, 3), (5, 1, 3), (5, 2, 3)])
    assert sorted(self.client.requested) == \
        [((-1.7578125, -90, 91.7578125, 46.7578125), (532, 778), SRS(4326))]
def test_remove(self):
    """remove_tile deletes a tile; the same coord can be cached again."""
    coord = (1, 0, 4)
    self.create_cached_tile(self.create_tile(coord))
    assert self.cache.is_cached(Tile(coord))
    self.cache.remove_tile(Tile(coord))
    assert not self.cache.is_cached(Tile(coord))

    # check if we can recreate a removed tile
    self.create_cached_tile(self.create_tile(coord))
    assert self.cache.is_cached(Tile(coord))
def test_store_tile_already_stored(self):
    # tile object is marked as stored,
    # check that it is not stored 'again'
    # (used for disable_storage)
    tile = Tile((0, 0, 4), ImageSource(BytesIO(b'foo')))
    tile.stored = True
    self.cache.store_tile(tile)
    # the tile object itself reports cached here — presumably via the
    # `stored` flag, since nothing was written; confirm against is_cached
    assert self.cache.is_cached(tile)
    # a fresh Tile for the same coord is NOT cached: store_tile skipped it
    tile = Tile((0, 0, 4))
    assert not self.cache.is_cached(tile)
def test_missing_tiles(self):
    """Every tile of the bundle except the stored one reports as missing."""
    self.cache.store_tile(self.create_tile(coord=(130, 200, 8)))
    assert os.path.exists(os.path.join(self.cache_dir, 'L08', 'R0080C0080.bundle'))

    # test that all other tiles in this bundle are missing
    assert self.cache.is_cached(Tile((130, 200, 8)))
    # a bundle holds 128x128 tiles, i.e. coords 128..255 inclusive;
    # range(128, 255) skipped the last row and column (off-by-one)
    for x in range(128, 256):
        for y in range(128, 256):
            if x == 130 and y == 200:
                continue
            assert not self.cache.is_cached(Tile((x, y, 8))), (x, y)
            assert not self.cache.load_tile(Tile((x, y, 8))), (x, y)
def test_bulk_store_tiles_with_different_levels(self):
    """store_tiles splits tiles into one .gpkg file per zoom level."""
    self.cache.store_tiles([
        self.create_tile((0, 0, 1)),
        self.create_tile((0, 0, 2)),
        self.create_tile((1, 0, 2)),
        self.create_tile((1, 0, 1)),
    ])
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert sorted(os.listdir(self.cache_dir)) == ['1.gpkg', '2.gpkg']
    assert self.cache.is_cached(Tile((0, 0, 1)))
    assert self.cache.is_cached(Tile((1, 0, 1)))
    assert self.cache.is_cached(Tile((0, 0, 2)))
    assert self.cache.is_cached(Tile((1, 0, 2)))
def test_merged_tiles(self, tile_mgr, mock_file_cache, source_base, source_overlay):
    """Base and overlay sources are merged; both receive one identical request."""
    tiles = tile_mgr.creator().create_tiles([Tile((0, 0, 1)), Tile((1, 0, 1))])
    assert mock_file_cache.stored_tiles == {(0, 0, 1), (1, 0, 1)}
    expected_req = [((-180.0, -90.0, 180.0, 90.0), (512, 256), SRS(4326))]
    assert source_base.requested == expected_req
    assert source_overlay.requested == expected_req

    hist = tiles[0].source.as_image().histogram()
    # lots of red (base), but not everything (overlay)
    assert 55000 < hist[255] < 60000  # red = 0xff
    assert 55000 < hist[256]  # green = 0x00
    assert 55000 < hist[512]  # blue = 0x00
def test_expire(self):
    """A ttl of 0 keeps tiles indefinitely; a short ttl expires them."""
    cache = RedisCache(self.host, int(self.port), prefix='mapproxy-test', db=1, ttl=0)
    stored = self.create_tile(coord=(9382, 1234, 9))
    assert cache.store_tile(stored)
    time.sleep(0.1)
    assert cache.is_cached(Tile(stored.coord))

    cache = RedisCache(self.host, int(self.port), prefix='mapproxy-test', db=1, ttl=0.05)
    stored = self.create_tile(coord=(5382, 2234, 9))
    assert cache.store_tile(stored)
    time.sleep(0.1)
    assert not cache.is_cached(Tile(stored.coord))
def test_bulk_store_tiles_with_different_levels(self):
    """store_tiles splits tiles into one .mbtile file per zoom level."""
    coords = [(0, 0, 1), (0, 0, 2), (1, 0, 2), (1, 0, 1)]
    self.cache.store_tiles([self.create_tile(coord) for coord in coords])
    assert_files_in_dir(self.cache_dir, ['1.mbtile', '2.mbtile'], glob='*.mbtile')
    for coord in coords:
        assert self.cache.is_cached(Tile(coord))
def test_remove_level_tiles_before(self):
    """remove_level_tiles_before removes tiles by level and age, keeps the file."""
    self.cache.store_tile(self.create_tile((0, 0, 1)))
    self.cache.store_tile(self.create_tile((0, 0, 2)))
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert sorted(os.listdir(self.cache_dir)) == ['1.mbtile', '2.mbtile']
    assert self.cache.is_cached(Tile((0, 0, 1)))

    # cut-off before the tile was stored -> nothing removed
    self.cache.remove_level_tiles_before(1, timestamp=time.time() - 60)
    assert self.cache.is_cached(Tile((0, 0, 1)))

    # cut-off after the tile was stored -> level 1 tile removed
    self.cache.remove_level_tiles_before(1, timestamp=time.time() + 60)
    assert not self.cache.is_cached(Tile((0, 0, 1)))

    # the per-level files stay on disk; other levels are untouched
    assert sorted(os.listdir(self.cache_dir)) == ['1.mbtile', '2.mbtile']
    assert self.cache.is_cached(Tile((0, 0, 2)))
def test_get_single(self):
    """A single tile triggers one identical request to base and overlay."""
    self.tile_mgr.creator().create_tiles([Tile((0, 0, 1))])
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert self.file_cache.stored_tiles == set([(0, 0, 1)])
    assert self.source_base.requested == \
        [((-180.0, -90.0, 0.0, 90.0), (256, 256), SRS(4326))]
    assert self.source_overlay.requested == \
        [((-180.0, -90.0, 0.0, 90.0), (256, 256), SRS(4326))]
def test_create_tile_single(self, tile_mgr, mock_file_cache, mock_wms_client):
    # not enabled for single tile requests
    tile_mgr.creator().create_tiles([Tile((0, 0, 2))])
    expected = {(0, 0, 2), (0, 1, 2), (1, 0, 2), (1, 1, 2)}
    assert mock_file_cache.stored_tiles == expected
    assert sorted(mock_wms_client.requested) == [
        ((-180.0, -90.0, 3.515625, 90.0), (522, 512), SRS(4326)),
    ]
def test_get_single(self, tile_mgr, mock_file_cache, source_base, source_overlay):
    """A single tile triggers one identical request to base and overlay."""
    tile_mgr.creator().create_tiles([Tile((0, 0, 1))])
    assert mock_file_cache.stored_tiles == {(0, 0, 1)}
    expected_req = [((-180.0, -90.0, 0.0, 90.0), (256, 256), SRS(4326))]
    assert source_base.requested == expected_req
    assert source_overlay.requested == expected_req
def test_bulk_get_multiple_meta_tiles(self):
    """Two meta tiles -> eight tiles created and stored."""
    tiles = self.tile_mgr.creator().create_tiles(
        [Tile((1, 0, 2)), Tile((2, 0, 2))])
    # nose-style eq_() replaced with plain assert, consistent with the
    # pytest-style tests in this file
    assert len(tiles) == 2 * 2 * 2
    assert self.file_cache.stored_tiles == set([
        (0, 0, 2), (1, 0, 2), (0, 1, 2), (1, 1, 2),
        (2, 0, 2), (3, 0, 2), (2, 1, 2), (3, 1, 2),
    ])
def test_store_bulk_with_overwrite(self):
    """store_tiles overwrites tiles that are already in the cache."""
    coord = (0, 0, 4)
    first = self.create_tile(coord)
    self.create_cached_tile(first)
    assert self.cache.is_cached(Tile(coord))

    loaded_tile = Tile(coord)
    assert self.cache.load_tile(loaded_tile)
    assert loaded_tile.source_buffer().read() == first.source_buffer().read()

    assert not self.cache.is_cached(Tile((1, 0, 4)))

    replacements = [self.create_another_tile((x, 0, 4)) for x in range(2)]
    assert self.cache.store_tiles(replacements)

    assert self.cache.is_cached(Tile(coord))
    loaded_tile = Tile(coord)
    assert self.cache.load_tile(loaded_tile)
    # check that tile is overwritten
    assert loaded_tile.source_buffer().read() != first.source_buffer().read()
    assert loaded_tile.source_buffer().read() == replacements[0].source_buffer().read()
def test_load_tile_cached(self):
    """A cached tile loads successfully and is not missing."""
    tile = self.create_tile()
    self.create_cached_tile(tile)
    tile = Tile((0, 0, 4))
    # idiom fix: `== True` replaced with a plain truthiness check
    assert self.cache.load_tile(tile)
    assert not tile.is_missing()
def test_load_tile_not_cached(self):
    """Loading an uncached tile fails and leaves the tile marked missing."""
    uncached = Tile((0, 0, 4))
    assert not self.cache.load_tile(uncached)
    assert uncached.source is None
    assert uncached.is_missing()