def main(args):
    """Clip-and-upload pipeline for every AOI feature in a shapefile.

    For each feature in the shapefile named by args[1]:
      1. download tiles covering the (buffered) feature envelope into
         an .mbtiles file,
      2. convert it to a GeoTIFF with gdal_translate,
      3. clip it to the feature polygon with gdalwarp (-cutline/-cwhere),
      4. upload the GeoTIFF and the .mbtiles to Google Cloud Storage.

    args -- sys.argv-style list; args[1] is the shapefile path.
    Returns False (after printing usage) when no shapefile is given.
    """
    try:
        shapefile = args[1]
        driver = ogr.GetDriverByName("ESRI Shapefile")
        dataSource = driver.Open(shapefile, 0)  # 0 = read-only
        layer = dataSource.GetLayer()
        totalImages = layer.GetFeatureCount()
        print(str(totalImages) + " Total Images")
    except IndexError:
        # args[1] missing: no shapefile argument supplied.
        usage()
        return False
    g = 0  # feature counter; doubles as the output file stem
    for feature in layer:
        the_ID = feature.GetField("id")
        the_geom = feature.GetGeometryRef()
        # Pad the AOI slightly (~0.0006 deg) so the cutline never
        # touches the edge of the downloaded raster.
        bufferDistance = .0006
        the_geom = the_geom.Buffer(bufferDistance)
        # OGR envelope order is (minX, maxX, minY, maxY).
        the_env = the_geom.GetEnvelope()
        ULLon = the_env[0]
        ULLat = the_env[3]
        LRLon = the_env[1]
        LRLat = the_env[2]
        # Renamed from `id`, which shadowed the builtin.
        stem = str(g)
        print(stem + " " + str(g + 1) + " of " + str(totalImages))
        mbTileName = stem + ".mbtiles"
        tempTiffName = stem + ".tiff"
        mb = MBTilesBuilder(
            tiles_url="https://tile-live.appspot.com/getTile/?z={z}&x={x}&y={y}&layer=derivative&redirect=false",
            filepath=mbTileName,
            ignore_errors="true")
        # landez bbox order: (minLon, minLat, maxLon, maxLat).
        mb.add_coverage(bbox=(ULLon, LRLat, LRLon, ULLat), zoomlevels=[20])
        mb.run()
        # SECURITY NOTE: these shell commands are built by string
        # concatenation; a quote character in the shapefile's "id"
        # field would break out of the gdalwarp -cwhere filter.
        os.system('gdal_translate ' + mbTileName + ' ' + tempTiffName)
        os.system(
            'gdalwarp ' + tempTiffName + ' clipped-' + stem + '.tiff' +
            ' -cutline AOIs.shp -crop_to_cutline -dstalpha -cwhere "id = \'' +
            str(the_ID) + '\'"')
        os.system('rm ' + tempTiffName)
        os.system('mv clipped-' + stem + '.tiff ' + tempTiffName)
        # Upload both artefacts (tempTiffName == stem + '.tiff').
        os.system('gsutil cp ' + tempTiffName + ' gs://tile-to-geotiff/iupui/mbtiles/')
        os.system('gsutil cp ' + mbTileName + ' gs://tile-to-geotiff/iupui/mbtiles/')
        g += 1
def download_for_offline(self, filepath, bbox, zoomlevels, delete=False):
    """Kick off a background tile download and poll it for progress.

    filepath   -- target .mbtiles path (removed first when delete=True)
    bbox       -- coverage bounding box passed through to landez
    zoomlevels -- zoom levels to download
    """
    if delete:
        os.remove(filepath)
    # Make sure any previously merged copy of this file is forgotten
    # before re-downloading it.
    MbtMergeManager().remove_from_merged(filepath)
    builder = MBTilesBuilder(filepath=filepath, cache=True)
    builder.add_coverage(bbox=bbox, zoomlevels=zoomlevels)
    # Run the download on a worker thread so the caller stays responsive.
    worker = Thread(target=builder.run, kwargs={'force': False})
    worker.start()
    # Probe the builder/thread twice a second until it finishes.
    Clock.schedule_interval(
        lambda dt: self.probe_mb_tiles_builder_thread(builder, worker), 0.5)
def download_for_offline(self, filepath, bbox, zoomlevels, delete=False):
    """ Starts the actual download and probes the progress. """
    if delete:
        os.remove(filepath)
    # Drop any previously merged copy of this file so the fresh
    # download can be re-merged cleanly.
    mbt_merge_manager = MbtMergeManager()
    mbt_merge_manager.remove_from_merged(filepath)
    mb = MBTilesBuilder(filepath=filepath, cache=True)
    mb.add_coverage(bbox=bbox, zoomlevels=zoomlevels)
    # Download on a worker thread so the UI thread is not blocked.
    mb_run_thread = Thread(target=mb.run, kwargs={'force': False})
    mb_run_thread.start()
    # Poll every 0.5 s (Clock is presumably Kivy's scheduler — confirm).
    Clock.schedule_interval(
        lambda dt: self.probe_mb_tiles_builder_thread(
            mb, mb_run_thread), 0.5)
def main(args):
    """For each feature in the given AOI shapefile: download map tiles
    into an .mbtiles file, convert it to GeoTIFF, clip it to the
    buffered feature polygon, and upload both files to Google Cloud
    Storage.

    args -- sys.argv-style list; args[1] is the shapefile path.
    Returns False (after printing usage) when no shapefile is given.
    """
    try:
        shapefile = args[1]
        driver = ogr.GetDriverByName("ESRI Shapefile")
        dataSource = driver.Open(shapefile, 0)  # 0 = read-only
        layer = dataSource.GetLayer()
        totalImages = layer.GetFeatureCount()
        print str(totalImages) + " Total Images"
    except IndexError:
        # No shapefile argument supplied.
        usage()
        return False
    # NOTE(review): the next five assignments are never used below.
    tile_size = 256
    zoomlevels = [20]
    tms_scheme = False
    srs = osr.SpatialReference()
    srs.SetWellKnownGeogCS("WGS84")
    g = 0  # feature counter; doubles as the output file stem
    for feature in layer:
        the_ID = feature.GetField("id")
        the_name = feature.GetField("Name")  # NOTE(review): unused
        the_geom = feature.GetGeometryRef()
        # Pad the AOI slightly before taking its envelope so the clip
        # never touches the raster edge.
        bufferDistance = .0006
        the_geom = the_geom.Buffer(bufferDistance)
        # OGR envelope order is (minX, maxX, minY, maxY).
        the_env = the_geom.GetEnvelope()
        ULLon = the_env[0]
        ULLat = the_env[3]
        LRLon = the_env[1]
        LRLat = the_env[2]
        id = str(g)  # NOTE(review): shadows the builtin id()
        print id + " " + str(g + 1) + " of " + str(totalImages)
        mbTileName = id + ".mbtiles"
        tempTiffName = id + ".tiff"
        mb = MBTilesBuilder(
            tiles_url="https://tile-live.appspot.com/getTile/?z={z}&x={x}&y={y}&layer=derivative&redirect=false",
            filepath=mbTileName,
            ignore_errors="true")
        # landez bbox order: (minLon, minLat, maxLon, maxLat).
        mb.add_coverage(bbox=(ULLon, LRLat, LRLon, ULLat), zoomlevels=[20])
        mb.run()
        # mbtiles -> GeoTIFF, then clip to this feature's polygon
        # (selected via the -cwhere id filter) with an alpha band.
        os.system('gdal_translate ' + mbTileName + ' ' + tempTiffName)
        os.system(
            'gdalwarp ' + tempTiffName + ' clipped-' + id + '.tiff' +
            ' -cutline AOIs.shp -crop_to_cutline -dstalpha -cwhere "id = \'' +
            str(the_ID) + '\'"')
        os.system('rm ' + tempTiffName)
        os.system('mv clipped-' + id + '.tiff ' + tempTiffName)
        # Upload both artefacts to GCS.
        os.system('gsutil cp ' + id + '.tiff gs://tile-to-geotiff/iupui/mbtiles/')
        os.system('gsutil cp ' + mbTileName + ' gs://tile-to-geotiff/iupui/mbtiles/')
        g += 1
def main(event):
    """Build an MBTiles file for the event's bbox and publish it to S3.

    event -- dict with keys 'campaign_uuid', 'index', 'bbox' and
             'zoom_levels'.  After uploading, hands the event on to
             spawn_make_pdf.
    """
    campaign_uuid = event['campaign_uuid']
    logging.info(campaign_uuid)
    tiles_file = '{0}.mbtiles'.format(event['index'])
    mbtiles_path = os.path.join(PATH, tiles_file)
    builder = MBTilesBuilder(
        cache=False,
        tiles_headers={'User-Agent': 'github.com/hotosm/mapcampaigner'},
        filepath=mbtiles_path)
    builder.add_coverage(bbox=event['bbox'], zoomlevels=event['zoom_levels'])
    builder.run()
    # Upload to s3.
    key = os.path.join('campaigns', campaign_uuid, 'mbtiles', tiles_file)
    with open(mbtiles_path, "rb") as data:
        client.upload_fileobj(
            Fileobj=data,
            Bucket=os.environ['S3_BUCKET'],
            Key=key,
            ExtraArgs={'ACL': 'public-read'})
    spawn_make_pdf(event)
'ttl': 3600 * 24 * 265, "options": x['options'] } for x in layerdef['layers']] layerdef = json.dumps({"version": "1.0.1", "layers": layers}) layerid = json.loads( fetch('http://' + username + '.cartodb.com/tiles/layergroup', layerdef, headers={'Content-Type': 'application/json'})) return 'http://' + username + ".cartodb.com/tiles/layergroup/" + layerid[ 'layergroupid'] + "/{z}/{x}/{y}.png" if __name__ == '__main__': if len(sys.argv) < 4: print "vizsjon_url max_zoom mbtiles [nthreads]" sys.exit() nthreads = 20 if len(sys.argv) > 4: nthreads = int(sys.argv[4]) logging.basicConfig(level=logging.DEBUG) template_url = get_layergroup_url(sys.argv[1]) logging.info("%d threads" % nthreads) mb = MBTilesBuilder(tiles_url=template_url, filepath=sys.argv[3], thread_number=nthreads, errors_as_warnings=True) mb.add_coverage(bbox=(-180.0, -90.0, 180.0, 90.0), zoomlevels=[0, int(sys.argv[2])]) mb.run()
import logging
import os

from landez import MBTilesBuilder

logging.basicConfig(level=logging.DEBUG)

# Esri World Imagery tile endpoint — note the {z}/{y}/{x} order.
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}'
zoomlevels = [4, 5, 6, 7, 8]
# (min lon, min lat, max lon, max lat) in WGS84 degrees.
bbox = (-7.818751263994194, 29.932874506926854, 72.01133072878456, 64.56842453507622)

mb = MBTilesBuilder(
    tiles_url=url,
    cache=False,
    filepath="esri-very-highres-2.mbtiles",
)
mb.add_coverage(bbox=bbox, zoomlevels=zoomlevels)
# Enumerate the (z, x, y) tuples covering the bbox without downloading.
tile_tuples = mb.tileslist(bbox, zoomlevels)
print(tile_tuples[:20])
# Expand each tuple into a concrete tile URL.
tile_urls = [url.format(x=x, y=y, z=z) for z, x, y in tile_tuples]
print(tile_urls[-20:])

import random
import asyncio
from aiohttp import ClientSession


async def fetch(url, session):
    # Mirror the tile under tiles-2/, keeping the path after "/tile/".
    relative = url.split("/tile/")[1]
    os.makedirs("tiles-2/" + relative.rsplit("/", 1)[0], exist_ok=True)
    async with session.get(url) as response:
        print(f"Reading {url}")
        content = await response.read()
        # NOTE(review): this chunk appears truncated here — the fetched
        # content is read but never written in the visible code; confirm
        # the remainder of fetch() against the full file.
import logging

from landez import MBTilesBuilder

logging.basicConfig(level=logging.DEBUG)

# Send a browser-like User-Agent with every tile request.
headers = {}
headers['User-Agent'] = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"

# Download Mapbox Terrain-RGB tiles for the Kunming area into an
# MBTiles file.
# FIX: the keyword was `header=headers`, which MBTilesBuilder does not
# recognise (unknown kwargs are ignored), so the User-Agent was never
# sent.  landez's documented keyword is `tiles_headers`.
mb = MBTilesBuilder(tiles_url="https://api.mapbox.com/v4/mapbox.terrain-rgb/{z}/{x}/{y}.pngraw?access_token=pk.eyJ1IjoidHNjaGF1YiIsImEiOiJjaW5zYW5lNHkxMTNmdWttM3JyOHZtMmNtIn0.CDIBD8H-G2Gf-cPkIuWtRg",
                    cache=True,
                    filepath="kunming-rgb.mbtiles",
                    tiles_headers=headers)
# bbox = (min lon, min lat, max lon, max lat)
mb.add_coverage(bbox=(102.1, 24, 102.2, 24.1), zoomlevels=[10, 11, 12, 13, 14, 15])
mb.run()
import logging

from landez import MBTilesBuilder

logging.basicConfig(level=logging.DEBUG)

# Coverage: one bounding box, zoom levels 0 through 8 inclusive.
coverage_bbox = (-7.818751263994194, 29.932874506926854,
                 72.01133072878456, 64.56842453507622)
coverage_zooms = list(range(9))  # [0, 1, ..., 8]

# Esri World Imagery serves tiles as /tile/{z}/{y}/{x} (y before x).
builder = MBTilesBuilder(
    tiles_url='https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
    cache=False,
    filepath="esri-highres.mbtiles",
)
builder.add_coverage(bbox=coverage_bbox, zoomlevels=coverage_zooms)
builder.run()
from mbutil import mbtiles_to_disk

logging.basicConfig(level=logging.DEBUG)

KEY = 'gnk8fnku3lwbxjz1fz34xx32'

# use png extension instead of jpeg
# jpeg will actually be downloaded but extension is png which won't break
# mbutil
url = "http://gpp3-wxs.ign.fr/%s/wmts?SERVICE=WMTS&VERSION=1.0.0&REQUEST=GetTile&LAYER=GEOGRAPHICALGRIDSYSTEMS.MAPS&STYLE=normal&FORMAT=image/jpeg&TILEMATRIXSET=PM&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}.png" % KEY
filepath = "ign.mbtiles"

mb = MBTilesBuilder(cache=True,
                    tiles_url=url,
                    tiles_headers={'Referer': 'localhost'},
                    filepath=filepath)

# If a previous run produced an mbtiles file, rebuild the tile cache
# from it so already-downloaded tiles are not fetched again.
if os.path.exists(mb.filepath):
    if os.path.exists(mb.cache.folder):
        shutil.rmtree(mb.cache.folder)
    mbtiles_to_disk(mb.filepath, mb.cache.folder)

# FIX: `file()` is Python-2-only and the handle was never closed; a
# context-managed open() works on both Python 2 and 3 and always closes.
with open('ign2mbtiles.geojson', 'r') as f:
    boxes_as_geojson = f.read()
features = geojson.loads(boxes_as_geojson).features

# Queue a coverage for every feature's bounds, then download once.
zooms = [11, 13, 15]
for feature in features:
    bbox = shape(feature.geometry).bounds
    mb.add_coverage(bbox=bbox, zoomlevels=zooms)
mb.run(force=True)
return urllib2.urlopen(request).read() def get_layergroup_url(vizjson_url): vizjson = vizjson_url username = vizjson.split('.')[0].replace('http://', '') vizjson = json.loads(fetch(sys.argv[1])) layerdef = vizjson['layers'][1]['options']['layer_definition'] layers = [ { "type": 'cartodb', 'ttl': 3600*24*265, "options": x['options'] } for x in layerdef['layers'] ] layerdef = json.dumps({ "version": "1.0.1", "layers": layers }) layerid = json.loads(fetch('http://' + username + '.cartodb.com/tiles/layergroup', layerdef, headers={ 'Content-Type': 'application/json' })) return 'http://' + username + ".cartodb.com/tiles/layergroup/" + layerid['layergroupid'] + "/{z}/{x}/{y}.png" if __name__ == '__main__': if len(sys.argv) < 4: print "vizsjon_url max_zoom mbtiles [nthreads]" sys.exit() nthreads = 20 if len(sys.argv) > 4: nthreads = int(sys.argv[4]) logging.basicConfig(level=logging.DEBUG) template_url = get_layergroup_url(sys.argv[1]) logging.info("%d threads" % nthreads) mb = MBTilesBuilder(tiles_url=template_url, filepath=sys.argv[3], thread_number=nthreads, errors_as_warnings=True) mb.add_coverage( bbox=(-180.0, -90.0, 180.0, 90.0), zoomlevels=[0, int(sys.argv[2])] ) mb.run()