def test_non_matching_bboxfor_origins(self):
    # Same extent and resolutions, but different origins: neither grid
    # may be considered a subset of the other.
    common = dict(bbox=[420000, 30000, 900000, 360000], res=[250])
    nw_grid = tile_grid(SRS(21781), origin='nw', **common)
    sw_grid = tile_grid(SRS(21781), origin='sw', **common)
    assert not nw_grid.is_subset_of(sw_grid)
def test_basic_mixed_name(self):
    """Origin aliases 'll'/'ul' support access only via 'sw'/'nw' respectively."""
    for origin, supported, unsupported in (('ll', 'sw', 'nw'), ('ul', 'nw', 'sw')):
        grid = tile_grid(4326, bbox=(-180, -90, 180, 90), origin=origin)
        assert grid.supports_access_with_origin(supported)
        assert not grid.supports_access_with_origin(unsupported)
def test_epsg_4326_bbox_from_sqrt2(self):
    # A factor-2 subgrid aligned with a sqrt2 base grid maps onto every
    # second sqrt2 level (8, 10, 12, ...).
    base = tile_grid(srs='epsg:4326', res_factor='sqrt2')
    sub_bbox = (10.0, -20.0, 40.0, 10.0)
    sub = tile_grid(align_with=base, bbox=sub_bbox, res_factor=2.0)
    eq_(sub.bbox, sub_bbox)
    for sub_level, base_level in ((0, 8), (1, 10), (2, 12)):
        eq_(sub.resolution(sub_level), base.resolution(base_level))
def test_custom_res_without_match(self):
    # With a custom resolution list over a bbox whose height does not line
    # up with the tiles, only the configured origin supports access.
    resolutions = [1, 0.5, 0.25]
    for origin, other in (('ll', 'ul'), ('ul', 'll')):
        grid = tile_grid(4326, bbox=(0, 0, 1024, 1023), origin=origin, res=resolutions)
        assert grid.supports_access_with_origin(origin)
        assert not grid.supports_access_with_origin(other)
def test_basic_no_level_zero(self):
    # Grid starts below level 0 (min_res at half the world resolution):
    # access works from either origin regardless of the configured one.
    start_res = 360/256/2
    for origin in ('ll', 'ul'):
        grid = tile_grid(4326, bbox=(-180, -90, 180, 90), origin=origin,
                         min_res=start_res)
        assert grid.supports_access_with_origin('ll')
        assert grid.supports_access_with_origin('ul')
def test_custom_without_match(self):
    # height (1000) is not divisible by res*tile_size, so only the
    # configured origin can be used for access
    for origin, other in (('ll', 'ul'), ('ul', 'll')):
        grid = tile_grid(4326, bbox=(0, 0, 1024, 1000), origin=origin, min_res=1)
        assert grid.supports_access_with_origin(origin)
        assert not grid.supports_access_with_origin(other)
def test_res_subset(self):
    # A grid whose resolutions are a subset of another grid's is a subset,
    # even when the level-0 tile extents differ.
    coarse = tile_grid(SRS(3857), res=[50000, 10000, 100, 1])
    fine = tile_grid(SRS(3857), res=[100000, 50000, 10000, 1000, 100, 10, 1, 0.5])
    assert coarse.tile_bbox((0, 0, 0)) != fine.tile_bbox((0, 0, 0))
    assert coarse.is_subset_of(fine)

    # A quadrant-only grid is a subset of the full webmercator grid.
    quadrant = tile_grid(
        SRS(3857),
        bbox=[0, 0, 20037508.342789244, 20037508.342789244],
        min_res=78271.51696402048,
        num_levels=18,
    )
    webmercator = tile_grid(SRS(3857), origin='nw')
    assert quadrant.is_subset_of(webmercator)
def test_epsg_4326_bbox(self):
    # Subgrid aligned with the default EPSG:4326 grid keeps the requested
    # bbox and picks the matching base resolution.
    base = tile_grid(srs='epsg:4326')
    sub_bbox = (10.0, -20.0, 40.0, 10.0)
    sub = tile_grid(align_with=base, bbox=sub_bbox)
    eq_(sub.bbox, sub_bbox)
    eq_(sub.resolution(0), 180 / 256 / 8)
    covered_bbox, covered_size, covered_tiles = sub.get_affected_level_tiles(sub_bbox, 0)
    eq_(covered_bbox, (10.0, -20.0, 55.0, 25.0))
    eq_(covered_size, (2, 2))
    eq_(list(covered_tiles), [(0, 1, 0), (1, 1, 0), (0, 0, 0), (1, 0, 0)])
def test_epsg_4326_bbox(self):
    base = tile_grid(srs='epsg:4326')
    request_bbox = (10.0, -20.0, 40.0, 10.0)
    aligned = tile_grid(align_with=base, bbox=request_bbox)
    eq_(aligned.bbox, request_bbox)
    eq_(aligned.resolution(0), 180/256/8)
    # Affected tiles at level 0: 2x2 tiles enclosing the request bbox.
    result = aligned.get_affected_level_tiles(request_bbox, 0)
    eq_(result[0], (10.0, -20.0, 55.0, 25.0))
    eq_(result[1], (2, 2))
    eq_(list(result[2]), [(0, 1, 0), (1, 1, 0), (0, 0, 0), (1, 0, 0)])
def test_epsg_4326_bbox_to_sqrt2(self):
    # sqrt2 subgrid aligned with a factor-2 base grid: every second sqrt2
    # level coincides with one base level.
    base = tile_grid(srs='epsg:4326', res_factor=2.0)
    sub_bbox = (10.0, -20.0, 40.0, 10.0)
    sub = tile_grid(align_with=base, bbox=sub_bbox, res_factor='sqrt2')
    eq_(sub.bbox, sub_bbox)
    for sub_level, base_level in ((0, 4), (2, 5), (4, 6)):
        eq_(sub.resolution(sub_level), base.resolution(base_level))
    assert sub.resolution(0) > sub.resolution(1) > sub.resolution(3)
    eq_(sub.resolution(3)/2, sub.resolution(5))
def test_epsg_4326_bbox_to_sqrt2(self):
    factor2_base = tile_grid(srs='epsg:4326', res_factor=2.0)
    request_bbox = (10.0, -20.0, 40.0, 10.0)
    sqrt2_sub = tile_grid(align_with=factor2_base, bbox=request_bbox,
                          res_factor='sqrt2')
    eq_(sqrt2_sub.bbox, request_bbox)
    # Even sqrt2 levels line up with the factor-2 base levels.
    eq_(sqrt2_sub.resolution(0), factor2_base.resolution(4))
    eq_(sqrt2_sub.resolution(2), factor2_base.resolution(5))
    eq_(sqrt2_sub.resolution(4), factor2_base.resolution(6))
    # Resolutions strictly decrease, and two sqrt2 steps halve the resolution.
    assert sqrt2_sub.resolution(0) > sqrt2_sub.resolution(1) > sqrt2_sub.resolution(3)
    eq_(sqrt2_sub.resolution(3) / 2, sqrt2_sub.resolution(5))
def test_basic_no_level_zero(self):
    """Grids that start below level 0 are accessible from either origin."""
    half_world_res = 360 / 256 / 2
    ll_grid = tile_grid(4326, bbox=(-180, -90, 180, 90), origin='ll',
                        min_res=half_world_res)
    ul_grid = tile_grid(4326, bbox=(-180, -90, 180, 90), origin='ul',
                        min_res=half_world_res)
    for grid in (ll_grid, ul_grid):
        assert grid.supports_access_with_origin('ll')
        assert grid.supports_access_with_origin('ul')
def merge_tiles(couchdb, destination, level, bbox, matrix_set='GoogleMapsCompatible',
                origin='nw', overlay=False, format='GTiff', srs='EPSG:3857',
                gdal_translate_bin='/usr/bin/gdal_translate',
                gdalwarp_bin='/usr/bin/gdalwarp'):
    """Merge all CouchDB tiles intersecting `bbox` at `level` into one file."""
    merge_grid = tile_grid(3857, origin=origin, name=matrix_set)
    level_res = merge_grid.resolution(level)
    _, _, affected_tiles = merge_grid.get_affected_level_tiles(bbox, level)
    # target size in pixels; never smaller than one pixel
    width = int(abs(bbox_width(bbox)) / level_res) or 1
    height = int(abs(bbox_height(bbox)) / level_res) or 1
    mode = 'RGBA' if overlay else 'RGB'
    tile_iter = load_tiles_from_couchdb(couchdb, affected_tiles, merge_grid, matrix_set)
    _merge_tiles(tile_iter, bbox, (width, height), level_res, mode=mode,
                 t_format=format, t_name=destination, t_srs=srs,
                 gdal_translate_bin=gdal_translate_bin, gdalwarp_bin=gdalwarp_bin)
def test_fixed_values(self):
    # Fixed template values pass through unchanged; a timestamp is added.
    doc = CouchDBMDTemplate({'hello': 'world', 'foo': 123}).doc(
        Tile((0, 0, 1)), tile_grid(4326))
    assert doc['timestamp'] == pytest.approx(time.time(), 0.1)
    assert doc['hello'] == 'world'
    assert doc['foo'] == 123
def create_mbtiles_export_seed_task(export_task, app_state):
    """Build a seed task exporting a layer's tiles into an MBTiles file.

    Returns None when the export coverage does not intersect the source's
    download coverage.
    """
    grid = DEFAULT_GRID
    export_grid = tile_grid('EPSG:3857', origin='sw')
    source = create_couchdb_source(export_task.layer, app_state, grid)
    wmts_source = export_task.layer.wmts_source
    export_filename = app_state.user_data_path(
        'export', export_task.project.title,
        wmts_source.name + '.mbtiles', make_dirs=True)
    cache = create_mbtiles_export_cache(export_filename, wmts_source, app_state)
    tile_mgr = create_tile_manager(format=wmts_source.format, cache=cache,
                                   sources=[source], grid=export_grid)
    coverage = coverage_intersection(
        coverage_from_geojson(wmts_source.download_coverage),
        coverage_from_geojson(export_task.coverage))
    if not coverage:
        return None
    levels = range(export_task.zoom_level_start, export_task.zoom_level_end + 1)
    return create_seed_task(tile_mgr, coverage, levels)
def test_fixed_values(self):
    tile = Tile((0, 0, 1))
    grid = tile_grid(4326)
    doc = CouchDBMDTemplate({'hello': 'world', 'foo': 123}).doc(tile, grid)
    # timestamp is always added; compare loosely against "now"
    assert_almost_equal(doc['timestamp'], time.time(), 2)
    eq_(doc['hello'], 'world')
    eq_(doc['foo'], 123)
def setup(self):
    TileCacheTestBase.setup(self)
    # 'global-webmarcator' (sic) is the grid name shared by these cache tests
    level_cache_grid = tile_grid(3857, name='global-webmarcator')
    self.cache = GeopackageLevelCache(self.cache_dir,
                                      tile_grid=level_cache_grid,
                                      table_name='test_tiles')
def load_expire_tiles(expire_dir, grid=None):
    """Load OSM expire-tile lists and return their extents as shapely boxes.

    :param expire_dir: a file with one ``z/x/y`` tile per line, or a
        directory tree of such files.
    :param grid: tile grid used to resolve tile coordinates to bboxes;
        defaults to webmercator with 'nw' origin.
    :return: list of ``shapely.geometry.box`` geometries, one per unique tile.
    """
    if grid is None:
        grid = tile_grid(3857, origin='nw')
    tiles = set()

    def parse(filename):
        with open(filename) as f:
            try:
                for line in f:
                    if not line:
                        continue
                    tile = tuple(map(int, line.split('/')))
                    tiles.add(tile)
            # BUG FIX: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit. Only parsing errors from
            # int()/split() should skip the rest of the file.
            except (ValueError, TypeError):
                log_config.warn('found error in %s, skipping rest of file', filename)

    if os.path.isdir(expire_dir):
        for root, dirs, files in os.walk(expire_dir):
            for name in files:
                parse(os.path.join(root, name))
    else:
        parse(expire_dir)

    boxes = []
    for tile in tiles:
        z, x, y = tile
        boxes.append(shapely.geometry.box(*grid.tile_bbox((x, y, z))))
    return boxes
def get_chunked_bbox(bbox, size: tuple = None, level: int = None):
    """
    Chunks a bbox into a grid of sub-bboxes.
    :param bbox: bbox in 4326, representing the area of the world to be chunked
    :param size: optional image size to use when calculating the resolution.
    :param level: The level to use for the affected level.
    :return: enclosing bbox of the area, dimensions of the grid, bboxes of all tiles.
    """
    from eventkit_cloud.utils.image_snapshot import get_resolution_for_extent

    # Starting resolution mirrors the one used for datapack snapshots.
    resolution = get_resolution_for_extent(bbox, size)
    # Subgrid of EPSG:4326 spanning exactly the requested extent; min_res
    # fixes the starting zoom level.
    chunk_grid = tile_grid(srs=4326, bbox=bbox, bbox_srs=4326, origin="ul",
                           min_res=resolution)
    # get_affected_level_tiles returns (bbox, grid dimensions, tile iterator);
    # only the tile iterator is needed here.
    tile_iterator = chunk_grid.get_affected_level_tiles(bbox, 0)[2]
    # Convert each tile coordinate into the bbox it covers on the map.
    return [chunk_grid.tile_bbox(tile) for tile in tile_iterator]
def setup(self):
    TileCacheTestBase.setup(self)
    self.gpkg_file = os.path.join(self.cache_dir, 'tmp.gpkg')
    self.table_name = 'test_tiles'
    # 'global-webmarcator' (sic) is the grid name shared by these cache tests
    cache_grid = tile_grid(3857, name='global-webmarcator')
    self.cache = GeopackageCache(self.gpkg_file,
                                 tile_grid=cache_grid,
                                 table_name=self.table_name)
def test_bbox(self):
    # The %(bbox)s placeholder must expand to the tile's bbox with 8 decimals.
    template = TileURLTemplate(TESTSERVER_URL + '/service?BBOX=%(bbox)s')
    client = TileClient(template, grid=tile_grid(4326))
    expected_req = {
        'path': '/service?BBOX=-180.00000000,0.00000000,-90.00000000,90.00000000'}
    canned_resp = {'body': b'tile', 'headers': {'content-type': 'image/png'}}
    with mock_httpd(TESTSERVER_ADDRESS, [(expected_req, canned_resp)]):
        resp = client.get_tile((0, 1, 2)).source.read()
        eq_(resp, b'tile')
def test_bbox(self):
    # The %(bbox)s placeholder must expand to the tile's bbox with 8 decimals.
    url_template = TileURLTemplate(TESTSERVER_URL + '/service?BBOX=%(bbox)s')
    tile_client = TileClient(url_template, grid=tile_grid(4326))
    exchange = (
        {'path': '/service?BBOX=-180.00000000,0.00000000,-90.00000000,90.00000000'},
        {'body': b'tile', 'headers': {'content-type': 'image/png'}},
    )
    with mock_httpd(TESTSERVER_ADDRESS, [exchange]):
        body = tile_client.get_tile((0, 1, 2)).source.read()
        assert body == b'tile'
def render(self, query):
    """Render `query`, optionally substituting the webmercator zoom level
    into the mapfile template and serializing via the configured lock."""
    mapfile = self.mapfile
    if '%(webmercator_level)' in mapfile:
        # Pick the webmercator level matching the query's bbox/size and
        # substitute it into the mapfile path/template.
        _bbox, level = tile_grid(3857).get_affected_bbox_and_level(
            query.bbox, query.size, req_srs=query.srs)
        mapfile = mapfile % {'webmercator_level': level}
    if not self.lock:
        return self.render_mapfile(mapfile, query)
    with self.lock():
        return self.render_mapfile(mapfile, query)
def setup(self):
    # Skip unless a Riak test URL is configured in the environment.
    riak_url = os.environ.get(self.riak_url_env)
    if not riak_url:
        raise SkipTest()
    # random suffix keeps parallel test runs from sharing a database
    db_name = "mapproxy_test_%d" % random.randint(0, 100000)
    TileCacheTestBase.setup(self)
    self.cache = RiakCache(
        riak_url,
        db_name,
        "riak",
        tile_grid=tile_grid(3857, name="global-webmarcator"),
        lock_dir=self.cache_dir
    )
def merge_tiles(couchdb, destination, level, bbox, matrix_set='GoogleMapsCompatible',
                origin='nw', overlay=False, format='GTiff', srs='EPSG:3857',
                gdal_translate_bin='/usr/bin/gdal_translate',
                gdalwarp_bin='/usr/bin/gdalwarp'):
    """Render the tiles covering `bbox` at `level` into a single georeferenced file."""
    grid = tile_grid(3857, origin=origin, name=matrix_set)
    res = grid.resolution(level)
    ll_ur, xy_res, tiles = grid.get_affected_level_tiles(bbox, level)
    # pixel dimensions of the merged output (minimum 1x1)
    size = (int(abs(bbox_width(bbox)) / res) or 1,
            int(abs(bbox_height(bbox)) / res) or 1)
    _merge_tiles(
        load_tiles_from_couchdb(couchdb, tiles, grid, matrix_set),
        bbox, size, res,
        mode='RGBA' if overlay else 'RGB',
        t_format=format, t_name=destination, t_srs=srs,
        gdal_translate_bin=gdal_translate_bin, gdalwarp_bin=gdalwarp_bin)
def setup(self):
    if not os.environ.get('MAPPROXY_TEST_COUCHDB'):
        raise SkipTest()
    couch_address = os.environ['MAPPROXY_TEST_COUCHDB']
    # random suffix keeps parallel test runs from sharing a database
    db_name = 'mapproxy_test_%d' % random.randint(0, 100000)
    TileCacheTestBase.setup(self)
    md_template = CouchDBMDTemplate({
        'row': '{{y}}',
        'tile_column': '{{x}}',
        'zoom': '{{level}}',
        'time': '{{timestamp}}',
        'coord': '{{wgs_tile_centroid}}',
    })
    self.cache = CouchDBCache(
        couch_address, db_name, file_ext='png',
        tile_grid=tile_grid(3857, name='global-webmarcator'),
        md_template=md_template)
def test_template_values(self):
    """Template placeholders are resolved from the tile and grid."""
    template = CouchDBMDTemplate({
        'row': '{{y}}', 'tile_column': '{{x}}', 'zoom': '{{level}}',
        'time': '{{timestamp}}', 'coord': '{{wgs_tile_centroid}}',
        'datetime': '{{utc_iso}}', 'coord_webmerc': '{{tile_centroid}}'})
    doc = template.doc(Tile((1, 0, 2)), tile_grid(3857))
    assert_almost_equal(doc['time'], time.time(), 2)
    # a custom 'time' field replaces the default 'timestamp'
    assert 'timestamp' not in doc
    eq_(doc['row'], 0)
    eq_(doc['tile_column'], 1)
    eq_(doc['zoom'], 2)
    assert_almost_equal(doc['coord'][0], -45.0)
    assert_almost_equal(doc['coord'][1], -79.17133464081945)
    assert_almost_equal(doc['coord_webmerc'][0], -5009377.085697311)
    assert_almost_equal(doc['coord_webmerc'][1], -15028131.257091932)
    # BUG FIX: regex made a raw string — '\d' in a plain literal is an
    # invalid escape sequence (DeprecationWarning, SyntaxError in future
    # Python versions).
    assert re.match(r'20\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ', doc['datetime']), doc['datetime']
def setup(self):
    """Build a RiakCache from a URL in the environment (pbc/http/https)."""
    url = os.environ[self.riak_url_env]
    urlparts = urlparse.urlparse(url)
    protocol = urlparts.scheme.lower()
    node = {'host': urlparts.hostname}
    # BUG FIX: urlparse's .hostname never contains ':' (the port is split
    # off into .port), so the original "':' in urlparts.hostname" check was
    # always False and an explicit port in the URL was silently ignored.
    if urlparts.port:
        if protocol == 'pbc':
            node['pb_port'] = urlparts.port
        if protocol in ('http', 'https'):
            node['http_port'] = urlparts.port
    # random suffix keeps parallel test runs from sharing a database
    db_name = 'mapproxy_test_%d' % random.randint(0, 100000)
    TileCacheTestBase.setup(self)
    self.cache = RiakCache([node], protocol, db_name,
                           tile_grid=tile_grid(3857, name='global-webmarcator'))
def __init__(self, request_bbox=None, grid_bbox=None, level=None, grid_srs=None,
             grid_bbox_srs=None, map_srs=None, res=None, scales=None,
             origin='ll', units='m', dpi=None):
    """Initialize a grid description from raw (string) request parameters.

    Empty/None inputs are normalized to None attributes. Builds a tilegrid
    when grid_srs is given and validates its level-0 tile bbox.
    (Mutable ``[]`` defaults were replaced with ``None``; all call sites only
    test the arguments for truthiness, so behavior is unchanged.)
    """
    self.grid_srs = SRS(grid_srs) if grid_srs else None
    self.grid_bbox_srs = SRS(grid_bbox_srs) if grid_bbox_srs else None
    self.map_srs = SRS(map_srs) if map_srs else None
    # BUG FIX: wrap in list() — a bare map() is a one-shot iterator on
    # Python 3 that cannot be indexed or iterated twice; every other
    # coordinate list here already uses list(map(...)).
    self.request_bbox = list(map(float, request_bbox)) if request_bbox else None
    self.origin = origin
    self._res = list(map(float, res)) if res else None
    self._scales = list(map(float, scales)) if scales else None
    self._units = 1 if units == 'm' else constants.UNIT_FACTOR
    self._dpi = float(dpi) if dpi else constants.OGC_DPI
    _grid_bbox = list(map(float, grid_bbox)) if grid_bbox else None
    self.grid_bbox = self.transform_grid_bbox(_grid_bbox)
    # number of levels follows whichever of res/scales is given
    if self._res:
        self._num_levels = len(self._res)
    elif self._scales:
        self._num_levels = len(self._scales)
    else:
        self._num_levels = None
    try:
        self.level = int(level)
    except TypeError:
        # level is None; NOTE(review): a non-numeric *string* would raise
        # ValueError instead and propagate — confirm that is intended
        self.level = None
    if self.grid_srs:
        self.tilegrid = tile_grid(
            srs=self.grid_srs, bbox=_grid_bbox, bbox_srs=self.grid_bbox_srs,
            origin=self.origin, res=self.res, num_levels=self._num_levels)
        self.validate_tile_bbox_for_level_0()
    else:
        self.tilegrid = None
def estimate_project_tiles(coverage, wmts_source, levels):
    """Estimate the tile count for `coverage`, clipped to the source's
    download coverage (or the full webmercator extent)."""
    if wmts_source.download_coverage:
        source_coverage = coverage_from_geojson(wmts_source.download_coverage)
    else:
        # No download restriction: use the whole webmercator square.
        world = shapely.geometry.Polygon([
            (-20037508.34, -20037508.34),
            (-20037508.34, 20037508.34),
            (20037508.34, 20037508.34),
            (20037508.34, -20037508.34),
        ])
        source_coverage = make_coverage(world, SRS(3857))
    overlap = source_coverage.geom.intersection(coverage.geom)
    if not overlap:
        return 0
    return estimate_tiles(tile_grid(3857), levels, make_coverage(overlap, SRS(3857)))
def estimate_project_tiles(coverage, wmts_source, levels):
    """Estimate how many tiles `coverage` requires within the source's
    downloadable area."""
    if wmts_source.download_coverage:
        wmts_source_coverage = coverage_from_geojson(wmts_source.download_coverage)
    else:
        # fall back to the full webmercator extent
        full_extent = shapely.geometry.Polygon(
            [(-20037508.34, -20037508.34), (-20037508.34, 20037508.34),
             (20037508.34, 20037508.34), (20037508.34, -20037508.34)])
        wmts_source_coverage = make_coverage(full_extent, SRS(3857))
    common_geom = wmts_source_coverage.geom.intersection(coverage.geom)
    if not common_geom:
        return 0
    intersection = make_coverage(common_geom, SRS(3857))
    return estimate_tiles(tile_grid(3857), levels, intersection)
def test_minimal_tiles_fragmented_ul(self):
    # Fragmented tiles must be merged into one minimal meta tile (2x3).
    self.mgrid = MetaGrid(grid=tile_grid('EPSG:4326', origin='ul'),
                          meta_size=(2, 2), meta_buffer=10)
    sgrid = self.mgrid.minimal_meta_tile([(2, 0, 3), (1, 1, 3), (2, 2, 3)])
    eq_(sgrid.grid_size, (2, 3))
    expected_patterns = [
        ((1, 0, 3), (10, 0)),
        ((2, 0, 3), (266, 0)),
        ((1, 1, 3), (10, 256)),
        ((2, 1, 3), (266, 256)),
        ((1, 2, 3), (10, 512)),
        ((2, 2, 3), (266, 512)),
    ]
    eq_(list(sgrid.tile_patterns), expected_patterns)
    eq_(sgrid.bbox, (-136.7578125, -46.7578125, -43.2421875, 90.0))
def create_mbtiles_export_seed_task(export_task, app_state):
    """Build a seed task exporting a layer's tiles into an MBTiles file.

    Returns None when the export coverage does not intersect the source's
    download coverage.
    """
    grid = DEFAULT_GRID
    export_grid = tile_grid('EPSG:3857', origin='sw')
    source = create_couchdb_source(export_task.layer, app_state, grid)
    export_filename = app_state.user_data_path(
        'export', export_task.project.title,
        export_task.layer.wmts_source.name + '.mbtiles', make_dirs=True)
    cache = create_mbtiles_export_cache(
        export_filename, export_task.layer.wmts_source, app_state)
    tile_mgr = create_tile_manager(
        format=export_task.layer.wmts_source.format, cache=cache,
        sources=[source], grid=export_grid)
    source_coverage = coverage_from_geojson(
        export_task.layer.wmts_source.download_coverage)
    export_coverage = coverage_from_geojson(export_task.coverage)
    coverage = coverage_intersection(source_coverage, export_coverage)
    # BUG FIX: guard against an empty intersection — without it an empty
    # coverage was passed on to create_seed_task (the sibling implementation
    # of this function returns None here).
    if not coverage:
        return None
    levels = range(export_task.zoom_level_start, export_task.zoom_level_end + 1)
    return create_seed_task(tile_mgr, coverage, levels)
def data_volume():
    """Estimate total tile count and download volume (MB) for a project.

    Expects form fields 'coverage' (GeoJSON feature collection) and
    'raster_data' (list of {source_id, start_level, end_level}); the
    'export' query arg switches lookup to LocalWMTSSource.
    """
    project_coverage = coverage_from_feature_collection(
        json.loads(request.form['coverage']))
    total_tiles = 0
    volume = 0
    if project_coverage:
        for raster_source in json.loads(request.form['raster_data']):
            wmts_source = None
            if request.args.get('export', 'false').lower() == 'true':
                local_source = g.db.query(model.LocalWMTSSource).get(
                    raster_source['source_id'])
                if local_source:
                    wmts_source = local_source.wmts_source
            else:
                wmts_source = g.db.query(model.ExternalWMTSSource).get(
                    raster_source['source_id'])
            # BUG FIX: wmts_source stays None for an unknown source id (or a
            # missing local source), which crashed with AttributeError on the
            # access below; skip such entries instead.
            if wmts_source is None:
                continue
            if wmts_source.download_coverage:
                wmts_source_coverage = coverage_from_geojson(
                    wmts_source.download_coverage)
            else:
                # no download restriction: full webmercator extent
                wmts_source_coverage = make_coverage(
                    shapely.geometry.Polygon([(-20037508.34, -20037508.34),
                                              (-20037508.34, 20037508.34),
                                              (20037508.34, 20037508.34),
                                              (20037508.34, -20037508.34)]),
                    SRS(3857))
            coverage_intersection = wmts_source_coverage.geom.intersection(
                project_coverage.geom)
            if not coverage_intersection:
                continue
            intersection = make_coverage(coverage_intersection, SRS(3857))
            levels = range(raster_source['start_level'],
                           raster_source['end_level'] + 1)
            source_tiles = estimate_tiles(tile_grid(3857), levels, intersection)
            # assumes ~15 KB per tile — TODO confirm this estimate
            volume += source_tiles * 15
            total_tiles += source_tiles
    return jsonify(total_tiles=total_tiles, volume_mb=volume / 1024.0)
def setup(self):
    """Build a RiakCache from a URL in the environment; skip when unset."""
    if not os.environ.get(self.riak_url_env):
        raise SkipTest()
    url = os.environ[self.riak_url_env]
    urlparts = urlparse.urlparse(url)
    protocol = urlparts.scheme.lower()
    node = {'host': urlparts.hostname}
    # BUG FIX: urlparse's .hostname never contains ':' (the port is split
    # off into .port), so the original "':' in urlparts.hostname" check was
    # always False and an explicit port in the URL was silently ignored.
    if urlparts.port:
        if protocol == 'pbc':
            node['pb_port'] = urlparts.port
        if protocol in ('http', 'https'):
            node['http_port'] = urlparts.port
    # random suffix keeps parallel test runs from sharing a database
    db_name = 'mapproxy_test_%d' % random.randint(0, 100000)
    TileCacheTestBase.setup(self)
    self.cache = RiakCache([node], protocol, db_name,
                           tile_grid=tile_grid(3857, name='global-webmarcator'))
def calculate_tiles(name, srs, bbox, bbox_srs, origin, res=None, scales=None,
                    dpi=constants.OGC_DPI, units=1):
    """Return per-level statistics for a grid defined by `res` or `scales`.

    :param res: explicit resolution list; derived from `scales` when None.
    :return: list of dicts with level, resolution, scale and tile counts.
    """
    if res is None and scales is not None:
        # derive resolutions from the scale denominators
        res = [
            round(scale_to_res(scale, dpi, units), defaults.DECIMAL_PLACES)
            for scale in scales
        ]
    tilegrid = tile_grid(srs=srs, bbox=bbox, bbox_srs=bbox_srs, res=res,
                         origin=origin, name=name)
    result = []
    # FIX: loop variable renamed from `res` — it shadowed the parameter of
    # the same name, which made the `scales[level] if scales else ...`
    # expression confusing to read.
    for level, level_res in enumerate(tilegrid.resolutions):
        tiles_in_x, tiles_in_y = tilegrid.grid_sizes[level]
        total_tiles = tiles_in_x * tiles_in_y
        result.append({
            'level': level,
            'resolution': level_res,
            'scale': scales[level] if scales else res_to_scale(level_res, dpi, units),
            'tiles_in_x': tiles_in_x,
            'tiles_in_y': tiles_in_y,
            'total_tiles': total_tiles
        })
    return result
def data_volume():
    """Estimate tile count and download volume (MB) for the posted project.

    Form fields: "coverage" (GeoJSON feature collection) and "raster_data"
    (list of {source_id, start_level, end_level}). The "export" query arg
    switches the source lookup to LocalWMTSSource.
    """
    project_coverage = coverage_from_feature_collection(json.loads(request.form["coverage"]))
    total_tiles = 0
    volume = 0
    if project_coverage:
        for raster_source in json.loads(request.form["raster_data"]):
            wmts_source = None
            if request.args.get("export", "false").lower() == "true":
                local_source = g.db.query(model.LocalWMTSSource).get(raster_source["source_id"])
                if local_source:
                    wmts_source = local_source.wmts_source
            else:
                wmts_source = g.db.query(model.ExternalWMTSSource).get(raster_source["source_id"])
            # BUG FIX: wmts_source stays None for an unknown source id (or a
            # missing local source), which crashed with AttributeError below;
            # skip such entries instead.
            if wmts_source is None:
                continue
            if wmts_source.download_coverage:
                wmts_source_coverage = coverage_from_geojson(wmts_source.download_coverage)
            else:
                # no download restriction: full webmercator extent
                wmts_source_coverage = make_coverage(
                    shapely.geometry.Polygon(
                        [
                            (-20037508.34, -20037508.34),
                            (-20037508.34, 20037508.34),
                            (20037508.34, 20037508.34),
                            (20037508.34, -20037508.34),
                        ]
                    ),
                    SRS(3857),
                )
            coverage_intersection = wmts_source_coverage.geom.intersection(project_coverage.geom)
            if not coverage_intersection:
                continue
            intersection = make_coverage(coverage_intersection, SRS(3857))
            levels = range(raster_source["start_level"], raster_source["end_level"] + 1)
            source_tiles = estimate_tiles(tile_grid(3857), levels, intersection)
            # assumes ~15 KB per tile — TODO confirm this estimate
            volume += source_tiles * 15
            total_tiles += source_tiles
    return jsonify(total_tiles=total_tiles, volume_mb=volume / 1024.0)
def calculate_tiles(name, srs, bbox, bbox_srs, origin, res=None, scales=None,
                    dpi=constants.OGC_DPI, units=1):
    """Return per-level statistics for a tile grid defined by resolutions
    (or derived from scale denominators when only `scales` is given)."""
    if res is None and scales is not None:
        res = [round(scale_to_res(scale, dpi, units), defaults.DECIMAL_PLACES)
               for scale in scales]
    tilegrid = tile_grid(
        srs=srs, bbox=bbox, bbox_srs=bbox_srs, res=res, origin=origin, name=name)
    result = []
    # FIX: loop variable renamed from `res` to avoid shadowing the parameter.
    for level, level_res in enumerate(tilegrid.resolutions):
        tiles_in_x, tiles_in_y = tilegrid.grid_sizes[level]
        total_tiles = tiles_in_x * tiles_in_y
        result.append({
            'level': level,
            'resolution': level_res,
            'scale': scales[level] if scales else res_to_scale(level_res, dpi, units),
            'tiles_in_x': tiles_in_x,
            'tiles_in_y': tiles_in_y,
            'total_tiles': total_tiles
        })
    return result
def test_template_values(self):
    """All template placeholders resolve from the tile coordinate and grid."""
    fields = {
        'row': '{{y}}',
        'tile_column': '{{x}}',
        'zoom': '{{level}}',
        'time': '{{timestamp}}',
        'coord': '{{wgs_tile_centroid}}',
        'datetime': '{{utc_iso}}',
        'coord_webmerc': '{{tile_centroid}}',
    }
    doc = CouchDBMDTemplate(fields).doc(Tile((1, 0, 2)), tile_grid(3857))
    assert doc['time'] == pytest.approx(time.time(), 0.1)
    # a custom 'time' field replaces the default 'timestamp'
    assert 'timestamp' not in doc
    assert doc['row'] == 0
    assert doc['tile_column'] == 1
    assert doc['zoom'] == 2
    assert doc['coord'][0] == pytest.approx(-45.0)
    assert doc['coord'][1] == pytest.approx(-79.17133464081945)
    assert doc['coord_webmerc'][0] == pytest.approx(-5009377.085697311)
    assert doc['coord_webmerc'][1] == pytest.approx(-15028131.25709193)
    assert re.match(r'20\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ', doc['datetime']), doc['datetime']
def setup(self):
    # 2x2 meta grid with a 10px buffer on the default EPSG:4326 grid
    base_grid = tile_grid('EPSG:4326')
    self.mgrid = MetaGrid(grid=base_grid, meta_size=(2, 2), meta_buffer=10)
if coverage.geom: coverage_area = coverage.geom.area else: coverage_area = _area_from_bbox(coverage.bbox) return coverage_area / grid_area def estimate_tiles(grid, levels, coverage=None): if coverage: ratio = grid_coverage_ratio(grid.bbox, grid.srs, coverage) else: ratio = 1 tiles = 0 for level in levels: grid_size = grid.grid_sizes[level] level_tiles = grid_size[0] * grid_size[1] level_tiles = int(math.ceil(level_tiles * ratio)) tiles += level_tiles return tiles if __name__ == '__main__': from mapproxy.srs import SRS from mapproxy.grid import tile_grid from mapproxy.util.coverage import BBOXCoverage print estimate_tiles(tile_grid(3857), levels=range(12), coverage=BBOXCoverage([5, 50, 10, 60], SRS(4326)))
def __init__(self, name):
    # track whether this source was hit; grid is the default 900913 grid
    self.requested = False
    self.name = name
    self.grid = tile_grid(900913)
def setup(self):
    # sqrt2 grid with a 4x4 meta size and 10px buffer
    self.grid = tile_grid('EPSG:4326', res_factor='sqrt2')
    self.mgrid = MetaGrid(grid=self.grid, meta_size=(4, 4), meta_buffer=10)
# calculated from well-known scale set GoogleCRS84Quad
METERS_PER_DEEGREE = 111319.4907932736


def meter_per_unit(srs):
    """Meters per map unit: degree length for geographic SRS, 1 otherwise."""
    if srs.is_latlong:
        return METERS_PER_DEEGREE
    return 1


class TileMatrixSet(object):
    """Iterates WMTS tile-matrix descriptions for a tile grid."""

    def __init__(self, grid):
        self.grid = grid
        self.name = grid.name
        self.srs_name = grid.srs.srs_code

    def __iter__(self):
        # FIX: .items() instead of the Python-2-only .iteritems()
        # (works on Python 2 dicts as well).
        for level, res in self.grid.resolutions.items():
            origin = self.grid.origin_tile(level, 'ul')
            bbox = self.grid.tile_bbox(origin)
            grid_size = self.grid.grid_sizes[level]
            # 0.28 mm is the OGC standard rendering pixel size
            scale_denom = res / (0.28 / 1000) * meter_per_unit(self.grid.srs)
            yield bunch(
                identifier=level,
                bbox=bbox,
                grid_size=grid_size,
                scale_denom=scale_denom,
                tile_size=self.grid.tile_size,
            )


if __name__ == '__main__':
    # FIX: print() calls instead of the Python-2-only print statement,
    # which is a SyntaxError on Python 3.
    print(TileMatrixSet(tile_grid(900913)).tile_matrixes())
    print(TileMatrixSet(tile_grid(4326, origin='ul')).tile_matrixes())
def deserialize(self, date): raise NotImplementedError def cache(self, user_token, name, data): try: os.makedirs(self.cache_path(user_token)) except OSError, ex: if ex.errno != errno.EEXIST: # ignore error when path already exists pass with open(self.cache_file(user_token, name), 'wb') as f: f.write(self.serialize(data)) DEFAULT_GRID = tile_grid(3857, origin='nw') class CouchDBCoverages(LimiterCache): def __init__(self, cache_dir, couchdb_url, geometry_layer): LimiterCache.__init__(self, cache_dir=cache_dir) self.cache_dir = cache_dir self.couchdb_url = couchdb_url self.geometry_layer = geometry_layer def cache_file(self, user_token, name): return os.path.join(self.cache_path(user_token), name + '.wkb') def coverage(self, user_token): return self.load(user_token, 'vector-search')
import os

from shapely.geometry import box
from shapely import wkb, wkt
from mapproxy.grid import tile_grid

from gbi_server.extensions import db
from gbi_server.model import WMTS
from gbi_server.authproxy.limiter import LimiterCache, InvalidUserToken
from gbi_server.lib.geometry import optimize_geometry
from gbi_server.lib.couchdb import CouchDBBox
from gbi_server.config import SystemConfig

# webmercator grid with north-west origin, shared default for tile coverages
DEFAULT_GRID = tile_grid(3857, origin='nw')


class TileCoverages(LimiterCache):
    # Per-user permitted-area cache for tile access; geometries are stored
    # as WKB files under the limiter cache directory.

    # NOTE(review): the `tile_grid` parameter shadows the imported
    # `tile_grid` function inside this method.
    def __init__(self, cache_dir, couchdb_url, geometry_layer, tile_grid=DEFAULT_GRID):
        LimiterCache.__init__(self, cache_dir=cache_dir)
        self.cache_dir = cache_dir
        self.couchdb_url = couchdb_url
        self.geometry_layer = geometry_layer
        self.tile_grid = tile_grid

    def cache_file(self, user_token, name):
        return os.path.join(self.cache_path(user_token), name + '.wkb')

    def is_permitted(self, user_token, layer, tile_coord):
        # NOTE(review): definition continues past this chunk; body truncated here.
        geometry = self.load(user_token, layer)
        if not geometry:
import requests
import sys

from cStringIO import StringIO

from mapproxy.util.py import reraise_exception
from mapproxy.util.ext.wmsparse.parse import parse_capabilities as parse_wms_capabilities
from mapproxy.grid import tile_grid

from . import csv
from .wmtsparse import parse_capabilities as parse_wmts_capabilities, WMTSCapabilities
from .exceptions import CapabilitiesError, UserError, FeatureError, ServiceError
from .utils import is_supported_srs

# default webmercator grid with north-west origin
webmercator_grid = tile_grid(3857, origin='nw')


def parsed_capabilities(cap_url):
    """Fetch and parse a capabilities document from `cap_url`.

    Tries WMTS first; on failure rewinds the document and falls back to WMS.
    When neither parses, raises CapabilitiesError carrying the WMS parse
    error message (via reraise_exception with the active exc_info).
    """
    cap_doc = request_capabilities(cap_url)
    try:
        cap = parse_wmts_capabilities(cap_doc)
    except Exception as ex:
        # not WMTS: rewind the in-memory document and retry as WMS
        try:
            cap_doc.seek(0)
            cap = parse_wms_capabilities(cap_doc)
        except Exception as ex:
            # NOTE(review): inner `ex` shadows the WMTS error above, so the
            # reported message is the WMS parse failure.
            reraise_exception(
                CapabilitiesError('not a valid capabilities document', ex.args[0]),
                sys.exc_info())
    return cap