def process_url(mydir, url, ymd, bbox, zoom, s3_bucket, s3_folder):
    """Fetch the MODIS active-fire CSV for one day, convert it to (gzipped)
    GeoJSON, render a thumbnail over an OSM background, and upload.

    mydir     -- working directory receiving every generated file
    url       -- FIRMS CSV download URL for day `ymd`
    ymd       -- date string embedded in the output file names
    bbox      -- region of interest; indices 0/2 are longitudes and 1/3 are
                 latitudes (assumed [w, s, e, n] -- TODO confirm with caller)
    zoom      -- zoom level for the Mapbox/OSM background image
    s3_bucket, s3_folder -- destination for CopyToS3

    Relies on the module globals `force` and `verbose`.

    FIX: the original wrapped every path in a redundant nested
    os.path.join(os.path.join(...)); the single-argument outer call is a
    no-op and has been removed.
    """
    csv_filename = os.path.join(mydir, "modis_af." + ymd + '.csv')
    geojson_filename = os.path.join(mydir, "modis_af." + ymd + '.geojson')
    geojsongz_filename = os.path.join(mydir, "modis_af." + ymd + '.geojson.gz')
    tif_filename = os.path.join(mydir, "modis_af." + ymd + '.tif')
    osm_bg_image = os.path.join(mydir, "osm_bg_image.tif")
    thn_image = os.path.join(mydir, "modis_af." + ymd + '_thn.jpg')

    # Download the raw CSV only when missing (or when forced).
    if force or not os.path.exists(csv_filename):
        urllib.urlretrieve(url, csv_filename)

    if force or not os.path.exists(geojson_filename):
        csv_to_geojson(csv_filename, geojson_filename, bbox)

    if force or not os.path.exists(geojsongz_filename):
        cmd = 'gzip < %s > %s' % (geojson_filename, geojsongz_filename)
        execute(cmd)

    # Browse-image geometry: center of the bbox, fixed 400x250 raster.
    centerlat = (bbox[1] + bbox[3]) / 2
    centerlon = (bbox[0] + bbox[2]) / 2
    rasterXSize = 400
    rasterYSize = 250

    mapbox_image(centerlat, centerlon, zoom, rasterXSize, rasterYSize,
                 osm_bg_image)
    # Gen_bbox returns the geographic extent actually covered by the
    # background image; use it so the WMS overlay lines up exactly.
    ullon, ullat, lrlon, lrlat = browseimage.Gen_bbox(
        centerlat, centerlon, zoom, rasterXSize, rasterYSize)

    url = "https://firms.modaps.eosdis.nasa.gov/wms/?SERVICE=WMS&VERSION=1.1.1&REQUEST=GetMap&LAYERS=fires24&width=400&height=250&BBOX="
    url += str(ullon) + "," + str(lrlat) + "," + str(lrlon) + "," + str(ullat)

    if force or not os.path.exists(tif_filename):
        urllib.urlretrieve(url, tif_filename)

    # Superimpose the fire raster over the map background.
    if force or not os.path.isfile(thn_image):
        cmd = str.format("composite -gravity center {0} {1} {2}",
                         tif_filename, osm_bg_image, thn_image)
        execute(cmd)

    file_list = [tif_filename, geojson_filename, geojsongz_filename,
                 thn_image]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def MergeLevels(srcPath, ymd, bbox, zoom):
    """Merge the per-level frost GeoJSON files under srcPath/geojson into one
    FeatureCollection, convert it to (gzipped) TopoJSON, build the browse /
    thumbnail imagery and upload the results.

    NOTE(review): `bbox` is never used in this function.
    NOTE(review): `s3_bucket` and `s3_folder` are read as free names --
    presumably module-level globals; verify at module scope.
    """
    global force, verbose
    geojsonDir = os.path.join(srcPath, "geojson")
    merge_filename = os.path.join(srcPath, "frost_merged.geojson")
    topojson_filename = os.path.join(srcPath, "frost.%s.topojson" % ymd)
    sw_osm_image = os.path.join(srcPath, "frost.%s_thn.jpg" % ymd)
    osm_bg_image = os.path.join(srcPath, "osm_bg.png")
    browse_filename = os.path.join(srcPath, "frost.%s_browse.tif" % ymd)
    small_browse_filename = os.path.join(srcPath,
                                         "frost.%s_small_browse.tif" % ymd)
    smoothedFileName = os.path.join(srcPath, "Smoothed_Frost." + ymd + ".tif")
    compositeFileName = os.path.join(srcPath, "Frost." + ymd + ".tif")
    if force or not os.path.exists(merge_filename):
        # Concatenate the features of frost levels 1..4 into one collection.
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in range(1, 5):
            i = l  # NOTE(review): unused alias, kept verbatim
            fileName = os.path.join(geojsonDir, "frost_level_%d.geojson" % l)
            if os.path.exists(fileName):
                print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
    if force or not os.path.exists(topojson_filename):
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename
        execute(cmd)
    if force or not os.path.exists(topojson_filename + ".gz"):
        # --keep preserves the uncompressed topojson alongside the .gz.
        cmd = "gzip --keep " + topojson_filename
        execute(cmd)
    if force or not os.path.exists(sw_osm_image):
        #ds = gdal.Open( smoothedFileName )
        ds = gdal.Open(compositeFileName)
        # Color ramp for frost levels 5 (green) down to 1 (purple).
        levels = [5, 4, 3, 2, 1]
        hexColors = ["#00FF00", "#FF9A00", "#FF0000", "#FF99CC", "#CC00CC"]
        MakeBrowseImage(ds, browse_filename, small_browse_filename,
                        osm_bg_image, sw_osm_image, levels, hexColors,
                        force, verbose, zoom)
    ds = None
    file_list = [sw_osm_image, topojson_filename, topojson_filename + ".gz"]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def process_file(mydir, filename, s3_bucket, s3_folder):
    """Vectorize a GEOS-5 precipitation GeoTIFF into leveled contours,
    publish a simplified TopoJSON plus a global browse thumbnail, and
    optionally clean up intermediates.

    NOTE(review): `ymd`, `force` and `verbose` are read as free names --
    presumably module-level globals; verify at module scope.
    """
    print "Processing", filename
    geojsonDir = os.path.join(mydir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(mydir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    super_subset_file = os.path.join(mydir, "geos5_precip_super.%s.tif" % ymd)
    merge_filename = os.path.join(geojsonDir, "geos5_precip.%s.geojson" % ymd)
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "geos5_precip.%s.topojson" % ymd)
    browse_filename = os.path.join(geojsonDir, "..",
                                   "geos5_precip.%s_browse.tif" % ymd)
    subset_filename = os.path.join(geojsonDir, "..",
                                   "geos5_precip.%s_small_browse.tif" % ymd)
    subset_aux_filename = os.path.join(
        geojsonDir, "..", "geos5_precip.%s_small_browse.tif.aux.xml" % ymd)
    osm_bg_image = os.path.join(mydir, "../..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..",
                                "geos5_precip.%s_thn.jpg" % ymd)
    json_filename = os.path.join(geojsonDir, "geos5_precip.%s.json" % (ymd))

    # Read the source geotransform to derive a square pixel size (px) for
    # the 2x supersampled working file.
    ds = gdal.Open(filename)
    geotransform = ds.GetGeoTransform()
    px = geotransform[1] / 2
    py = geotransform[5] / 2  # NOTE(review): unused
    xorg = geotransform[0]
    yorg = geotransform[3]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    ymax = yorg + geotransform[5] * ds.RasterYSize
    ds = None

    # upsample and convolve
    if force or not os.path.exists(super_subset_file):
        # we need to have square pixels
        cmd = "gdalwarp -overwrite -q -r cubicspline -tr %s %s -co COMPRESS=LZW %s %s" % (
            str(px), str(px), filename, super_subset_file)
        execute(cmd)

    # Contour thresholds (Fibonacci-like) and matching GPM palette
    # (light -> dark; see http://colorbrewer2.org/).
    levels = [377, 233, 144, 89, 55, 34, 21, 13, 8, 5, 3]
    hexColors = [
        "#f7fcf0", "#e0f3db", "#ccebc5", "#a8ddb5", "#7bccc4", "#4eb3d3",
        "#2b8cbe", "#0868ac", "#084081", "#810F7C", "#4D004A"
    ]
    ds = gdal.Open(super_subset_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    if force or not os.path.exists(topojson_filename + ".gz"):
        # One thresholded raster + geojson per level ...
        for l in levels:
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "geos5_precip",
                        force, verbose)
        # ... then merge all level geojsons into a single collection.
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir,
                                    "geos5_precip_level_%d.geojson" % l)
            if os.path.exists(fileName):
                if verbose:
                    print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        # Convert to topojson (simplified); silence tool unless verbose.
        quiet = "> /dev/null 2>&1"
        if verbose:
            quiet = " "
        cmd = "topojson -p --bbox --simplify-proportion 0.5 -o " + topojson_filename + " " + merge_filename + quiet
        execute(cmd)
        cmd = "gzip -f " + topojson_filename
        execute(cmd)

    if not os.path.exists(osm_bg_image):
        # Whole-world background for the global product.
        wms(90, -180, -90, 180, osm_bg_image)
    if force or not os.path.exists(sw_osm_image):
        zoom = 1
        scale = 1
        rColors = list(reversed(hexColors))
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, rColors, force, verbose, zoom,
                        scale)
    ds = None
    file_list = [sw_osm_image, topojson_filename + ".gz", filename]
    # NOTE(review): force/verbose hard-coded to 1 here, unlike siblings.
    CopyToS3(s3_bucket, s3_folder, file_list, 1, 1)
    if not verbose:
        # Cleanup
        if config.USING_AWS_S3_FOR_STORAGE:
            cmd = "rm -rf %s " % (mydir)
            execute(cmd)
        else:
            cmd = "rm -rf %s %s %s %s %s %s %s %s" % (
                merge_filename, browse_filename, topojson_filename,
                subset_filename, super_subset_file, subset_aux_filename,
                geojsonDir, levelsDir)
            execute(cmd)
def process(mydir, lsFile, regionName, region, s3_bucket, s3_folder):
    """Subset the LandScan 2011 population raster to `region`, vectorize it
    into density-level contours, publish TopoJSON and a thumbnail.

    region -- dict with at least 'name', 'bbox' ([w, s, e, n] -- TODO
              confirm) and 'thn_zoom'.
    Relies on module globals `force` and `verbose`.
    """
    scene = regionName
    subsetFileName = os.path.join(mydir, "ls.2011_subset.tif")
    if force or not os.path.exists(subsetFileName):
        bbox = region['bbox']
        print region['name'], region['bbox']
        warpOptions = "-q -overwrite -co COMPRESS=DEFLATE -t_srs EPSG:4326 -te %s %s %s %s " % (
            bbox[0], bbox[1], bbox[2], bbox[3])
        warpCmd = 'gdalwarp ' + warpOptions + lsFile + ' ' + subsetFileName
        execute(warpCmd)
        if verbose:
            print "LS Subset", subsetFileName
    if verbose:
        print "Processing", subsetFileName
    geojsonDir = os.path.join(mydir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(mydir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    merge_filename = os.path.join(geojsonDir, "%s_levels.geojson" % scene)
    topojson_filename = os.path.join(geojsonDir, "..", "ls.2011.topojson")
    browse_filename = os.path.join(geojsonDir, "..", "ls.2011_browse.tif")
    subset_filename = os.path.join(geojsonDir, "..",
                                   "ls.2011_small_browse.tif")
    osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "ls.2011_thn.jpg")
    # Population density thresholds, highest first.
    levels = [5500, 3400, 2100, 1300, 800, 500, 300, 200, 100]
    # From http://colorbrewer2.org/
    hexColors = [
        "#f7f4f9", "#e7e1ef", "#d4b9da", "#c994c7", "#df65b0", "#e7298a",
        "#ce1256", "#980043", "#67001f"
    ]
    ds = gdal.Open(subsetFileName)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    if force or not os.path.exists(topojson_filename + ".gz"):
        # One thresholded raster + geojson per level, then merge them.
        for l in levels:
            fileName = os.path.join(levelsDir, scene + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "population",
                        force, verbose)
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir,
                                    "population_level_%d.geojson" % l)
            if os.path.exists(fileName):
                print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename
        execute(cmd)
        cmd = "gzip --keep " + topojson_filename
        execute(cmd)
    if not os.path.exists(osm_bg_image):
        # Derive the map-background extent from the subset's geotransform.
        geotransform = ds.GetGeoTransform()
        xorg = geotransform[0]
        yorg = geotransform[3]
        xmax = xorg + geotransform[1] * ds.RasterXSize
        ymax = yorg + geotransform[5] * ds.RasterYSize
        ullat = yorg
        ullon = xorg
        lrlat = ymax
        lrlon = xmax
        print "wms", ullat, ullon, lrlat, lrlon
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)
    if force or not os.path.exists(sw_osm_image):
        zoom = region['thn_zoom']
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, hexColors, force, verbose, zoom)
    ds = None
    file_list = [
        sw_osm_image, topojson_filename, topojson_filename + ".gz",
        subsetFileName
    ]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def process_url( mydir, url, ymd, bbox, zoom, s3_bucket, s3_folder ): orig_filename = os.path.join(os.path.join(mydir, "..", "quakes." + ymd + '.geojson')) csv_filename = os.path.join(os.path.join(mydir, "quakes." + ymd + '.csv')) geojson_filename = os.path.join(os.path.join(mydir, "quakes." + ymd + '.geojson')) geojsongz_filename = os.path.join(os.path.join(mydir, "quakes." + ymd + '.geojson.gz')) tif_filename = os.path.join(os.path.join(mydir, "quakes." + ymd + '.tif')) osm_bg_image = os.path.join(os.path.join(mydir, "osm_bg_image.tif")) thn_image = os.path.join(os.path.join(mydir, "quakes." + ymd + '_thn.jpg')) if force or not os.path.exists(orig_filename): if verbose: print "retrieving:", orig_filename urllib.urlretrieve(url, orig_filename) json_data = open(orig_filename).read() data = json.loads(json_data) results = {} results['type'] = "FeatureCollection" results['bbox'] = data['bbox'] results['metadata'] = data['metadata'] results['features'] = [] for f in data['features']: coords = f['geometry']['coordinates'] lon = coords[0] lat = coords[1] if inbbox(bbox, lat, lon): qtime = int(f['properties']['time'])/1000 f['properties']['date'] = time.ctime(qtime) newprops = { 'type': f['properties']['type'], 'title': f['properties']['title'], 'place': f['properties']['place'], 'mag': f['properties']['mag'], 'rms': f['properties']['rms'], 'status': f['properties']['status'], 'date': f['properties']['date'], 'detail': f['properties']['detail'], 'url': f['properties']['url'] } f['properties'] = newprops results['features'].append(f) if verbose: print "found", len(results['features']) with open(geojson_filename, 'w') as outfile: json.dump(results, outfile) if force or not os.path.exists(geojsongz_filename): cmd = 'gzip < %s > %s' %( geojson_filename, geojsongz_filename) execute(cmd) centerlat = (bbox[1]+bbox[3])/2 centerlon = (bbox[0]+bbox[2])/2 rasterXSize = 400 rasterYSize = 250 mapbox_image(centerlat, centerlon, zoom, rasterXSize, rasterYSize, osm_bg_image) ullon, 
ullat, lrlon, lrlat = browseimage.Gen_bbox(centerlat, centerlon, zoom, rasterXSize, rasterYSize) dx = (lrlon-ullon)/rasterXSize dy = (ullat-lrlat)/rasterXSize #print "org:", ullon, ullat, dx, dy im = Image.open(osm_bg_image) draw = ImageDraw.Draw(im) for f in results['features']: coords = f['geometry']['coordinates'] lon = coords[0] lat = coords[1] x = int((lon-ullon)/dx) y = int((ullat-lat)/dx) #print lon, lat, x, y draw.ellipse( [(x-1,y-1),(x+1,y+1)]) im.save(tif_filename, "PNG") # superimpose the suface water over map background #if force or not os.path.isfile(sw_osm_image): if force or not os.path.isfile(thn_image): #cmd = str.format("composite -gravity center {0} {1} {2}", tif_filename, osm_bg_image, thn_image) cmd = "cp %s %s" % (tif_filename, thn_image) execute(cmd) file_list = [ geojson_filename, geojsongz_filename, thn_image ] CopyToS3( s3_bucket, s3_folder, file_list, force, verbose ) if not verbose: cmd = "rm -rf %s %s" % (tif_filename, osm_bg_image) execute(cmd)
def process(mydir, scene, s3_bucket, s3_folder):
    """Subset an EF5 flood-inundation raster to a fixed Namibia window,
    vectorize it into water-height contours, publish TopoJSON and a
    thumbnail.  Exits the process if the input file is missing.

    Relies on module globals `force` and `verbose`.
    """
    fullName = os.path.join(mydir, scene + ".tif")
    if not os.path.exists(fullName):
        print "File does not exist", fullName
        sys.exit(-1)
    # Flood inundation map for Namibia has to large of an extent [[10,-30,30,-10]]
    # we can trim it [15, -20, 20, -10]
    subsetFileName = os.path.join(mydir, "%s_subset.tif" % scene)
    if force or not os.path.exists(subsetFileName):
        # NOTE(review): actual trim is [15, -20, 20, -12], not -10 as above.
        bbox = [15, -20, 20, -12]
        warpOptions = "-q -overwrite -co COMPRESS=DEFLATE -t_srs EPSG:4326 -te %s %s %s %s " % (
            bbox[0], bbox[1], bbox[2], bbox[3])
        warpCmd = 'gdalwarp ' + warpOptions + fullName + ' ' + subsetFileName
        execute(warpCmd)
    geojsonDir = os.path.join(mydir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(mydir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    merge_filename = os.path.join(geojsonDir, "ef5.%s.geojson" % scene)
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "ef5.%s.topojson" % scene)
    browse_filename = os.path.join(geojsonDir, "..",
                                   "ef5.%s_browse.tif" % scene)
    small_filename = os.path.join(geojsonDir, "..",
                                  "ef5.%s_small_browse.tif" % scene)
    osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "ef5.%s_thn.jpg" % scene)
    ds = gdal.Open(subsetFileName)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    # Water-height thresholds (Fibonacci-like), highest first, and a
    # sequential red palette.
    levels = [21, 13, 8, 5, 3, 2, 1]
    hexColors = [
        "#fee5d9", "#fcbba1", "#fc9272", "#fb6a4a", "#ef3b2c", "#cb181d",
        "#99000d"
    ]
    if force or not os.path.exists(topojson_filename + ".gz"):
        if verbose:
            print "Processing", subsetFileName
        for l in levels:
            fileName = os.path.join(levelsDir, scene + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "height", force,
                        verbose)
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "height_level_%d.geojson" % l)
            if os.path.exists(fileName):
                print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename
        execute(cmd)
        cmd = "gzip --keep " + topojson_filename
        execute(cmd)
    if force or not os.path.exists(sw_osm_image):
        # NOTE(review): passes subsetFileName where siblings pass the small
        # browse file; `small_filename` is otherwise unused -- confirm intent.
        MakeBrowseImage(ds, browse_filename, subsetFileName, osm_bg_image,
                        sw_osm_image, levels, hexColors, force, verbose, 6)
    # we could remove geojsonDir and levelsDir
    #cmd = "rm -rf %s %s" %(geojsonDir, levelsDir)
    ds = None
    file_list = [
        sw_osm_image, topojson_filename, topojson_filename + ".gz", fullName
    ]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def process(gpm_dir, gis_file_day, region, s3_bucket, s3_folder, ymd):
    """Subset the 24-hour GPM accumulation raster to `region`, vectorize it
    into precipitation contours (mm), publish TopoJSON and a thumbnail.

    Relies on module globals `force`, `verbose` and `BASE_DIR`.
    """
    # subset the file for that region
    bbox = region['bbox']
    gis_file = os.path.join(BASE_DIR, gpm_dir, gis_file_day)
    subset_file = os.path.join(BASE_DIR, gpm_dir, "gpm_24.%s.tif" % ymd)
    if force or not os.path.exists(subset_file):
        cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (
            bbox[0], bbox[1], bbox[2], bbox[3], gis_file, subset_file)
        execute(cmd)
    geojsonDir = os.path.join(gpm_dir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(gpm_dir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    merge_filename = os.path.join(geojsonDir, "gpm_24.%s.geojson" % ymd)
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "gpm_24.%s.topojson" % ymd)
    browse_filename = os.path.join(geojsonDir, "..",
                                   "gpm_24.%s_browse.tif" % ymd)
    subset_filename = os.path.join(geojsonDir, "..",
                                   "gpm_24.%s_small_browse.tif" % ymd)
    osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "gpm_24.%s_thn.jpg" % ymd)
    # Precip thresholds in mm (Fibonacci-like), highest first.
    levels = [144, 89, 55, 34, 21, 13, 8, 5, 3, 2, 1]
    # From http://colorbrewer2.org/
    hexColors = [
        "#f7fcf0", "#e0f3db", "#ccebc5", "#a8ddb5", "#7bccc4", "#4eb3d3",
        "#2b8cbe", "#0868ac", "#084081", "#810F7C", "#4D004A"
    ]
    ds = gdal.Open(subset_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    # Source raster stores precip * 10.
    data /= 10  # back to mm
    if force or not os.path.exists(topojson_filename + ".gz"):
        for l in levels:
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data,
                        "daily_precipitation", force, verbose)
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(
                geojsonDir, "daily_precipitation_level_%d.geojson" % l)
            if os.path.exists(fileName):
                print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename
        execute(cmd)
        cmd = "gzip --keep " + topojson_filename
        execute(cmd)
    if force or not os.path.exists(sw_osm_image):
        # problem is that we need to scale it or adjust the levels for coloring (easier)
        adjusted_levels = [1440, 890, 550, 340, 210, 130, 80, 50, 30, 20, 10]
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, adjusted_levels, hexColors, force,
                        verbose)
    ds = None
    file_list = [
        sw_osm_image, topojson_filename, topojson_filename + ".gz",
        subset_file
    ]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def process(gpm_dir, name, gis_file, ymd, regionName, s3_bucket, s3_folder,
            levels, hexColors):
    """Subset + supersample a GPM raster for `regionName`, vectorize it into
    precip contours (global region only; other regions delegate to a node
    script that subsets the global geojson), publish TopoJSON, thumbnail
    and a compressed tif, then optionally clean up.

    levels    -- precip thresholds (data units are mm*10 in the raster)
    hexColors -- palette matching `levels`
    """
    global force, verbose
    region = config.regions[regionName]
    region_dir = os.path.join(gpm_dir, regionName)
    if not os.path.exists(region_dir):
        os.makedirs(region_dir)
    origFileName = os.path.join(gpm_dir, gis_file)
    print "processing ", regionName, name, gis_file
    if not os.path.exists(origFileName):
        print "File does not exist", origFileName
        return
    #
    # subset the file for that region
    #
    bbox = region['bbox']
    subset_file = os.path.join(region_dir, "%s.%s_ss.tif" % (name, ymd))
    geojsonDir = os.path.join(region_dir, "geojson_%s" % (name))
    levelsDir = os.path.join(region_dir, "levels_%s" % (name))
    origFileName_tfw = origFileName.replace(".tif", ".tfw")
    supersampled_file = os.path.join(region_dir, "%s.%s_x2.tif" % (name, ymd))
    merge_filename = os.path.join(geojsonDir, "..",
                                  "%s.%s.geojson" % (name, ymd))
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "%s.%s.topojson" % (name, ymd))
    topojson_gz_filename = os.path.join(region_dir,
                                        "%s.%s.topojson.gz" % (name, ymd))
    browse_filename = os.path.join(geojsonDir, "..",
                                   "%s.%s_browse.tif" % (name, ymd))
    subset_aux_filename = os.path.join(
        geojsonDir, "..", "%s.%s_small_browse.tif.aux.xml" % (name, ymd))
    subset_filename = os.path.join(geojsonDir, "..",
                                   "%s.%s_small_browse.tif" % (name, ymd))
    osm_bg_image = os.path.join(gpm_dir, "..", "%s_osm_bg.png" % regionName)
    sw_osm_image = os.path.join(region_dir, "%s.%s_thn.png" % (name, ymd))
    tif_image = os.path.join(region_dir, "%s.%s.tif" % (name, ymd))
    rgb_tif_image = os.path.join(region_dir, "%s.%s.rgb.tif" % (name, ymd))

    # subset
    if force or not os.path.exists(subset_file):
        cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (
            bbox[0], bbox[1], bbox[2], bbox[3], origFileName, subset_file)
        execute(cmd)

    # Read the source pixel size to pick the supersampling resolution.
    ds = gdal.Open(origFileName)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pixelsize = geotransform[1]
    if regionName == 'global':
        pixelsize /= 2
        method = 'near'
    else:
        pixelsize /= 5
        method = 'near'

    # supersample
    if force or not os.path.exists(supersampled_file):
        cmd = "gdalwarp -overwrite -q -r %s -tr %f %f -te %f %f %f %f -co COMPRESS=LZW %s %s" % (
            method, pixelsize, pixelsize, bbox[0], bbox[1], bbox[2], bbox[3],
            subset_file, supersampled_file)
        execute(cmd)

    # Debug-only colorized tif (only generated when verbose; color_file is
    # only bound in this branch).
    if verbose:
        color_file = color_table(name)
        if force or (verbose and not os.path.exists(rgb_tif_image)):
            cmd = "gdaldem color-relief -q -alpha -of GTiff %s %s %s" % (
                supersampled_file, color_file, rgb_tif_image)
            execute(cmd)

    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)

    ds = gdal.Open(supersampled_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pixelsize = geotransform[1]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    ymax = yorg - geotransform[1] * ds.RasterYSize
    data[data > 9000] = 0  # No value
    sdata = data / 10  # back to mm

    if regionName != 'global':
        # Invoke the node script to subset the global geojson
        global_dir = os.path.join(gpm_dir, "global")
        global_geojson = os.path.join(global_dir,
                                      "%s.%s.geojson" % (name, ymd))
        if not os.path.exists(global_geojson):
            print "missing global geojson", global_geojson
            sys.exit(-1)
        print "doing regionsl subset...", regionName, global_geojson
        cmd = "node ../subsetregions.js " + regionName + " " + global_geojson
        execute(cmd)
    else:
        if force or not os.path.exists(topojson_filename + ".gz"):
            for idx, l in enumerate(levels):
                fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
                CreateLevel(l, geojsonDir, fileName, ds, sdata, "precip",
                            regionName)
            jsonDict = dict(type='FeatureCollection', features=[])
            for idx, l in enumerate(levels):
                fileName = os.path.join(geojsonDir,
                                        "precip_level_%d.geojson" % l)
                if os.path.exists(fileName):
                    with open(fileName) as data_file:
                        jdata = json.load(data_file)
                    if 'features' in jdata:
                        for f in jdata['features']:
                            jsonDict['features'].append(f)
            with open(merge_filename, 'w') as outfile:
                json.dump(jsonDict, outfile)
            quiet = " > /dev/null 2>&1"
            if verbose:
                quiet = " "
            # Convert to topojson
            cmd = "topojson --no-stitch-poles --bbox -p precip -o " + topojson_filename + " " + merge_filename + quiet
            execute(cmd)
            if verbose:
                keep = " --keep "
            else:
                keep = " "
            cmd = "gzip -f " + keep + topojson_filename
            execute(cmd)

    if not os.path.exists(osm_bg_image):
        print "calling wms", regionName, ymax, xorg, yorg, xmax, osm_bg_image
        wms(yorg, xorg, ymax, xmax, osm_bg_image)

    # Browse-image levels are in raw data units (mm * 10).
    def scale(x):
        return x * 10

    adjusted_levels = map(scale, levels)
    zoom = 2
    if force or not os.path.exists(sw_osm_image):
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, list(reversed(adjusted_levels)),
                        list(reversed(hexColors)), force, verbose, zoom)
    if force or not os.path.exists(tif_image):
        cmd = "gdalwarp -overwrite -q -co COMPRESS=LZW %s %s" % (subset_file,
                                                                 tif_image)
        execute(cmd)
    ds = None
    file_list = [sw_osm_image, topojson_filename + ".gz", tif_image]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
    if not verbose:
        # Cleanup
        if config.USING_AWS_S3_FOR_STORAGE:
            # moved to end
            # NOTE(review): removal deliberately disabled; cmd is unused here.
            cmd = "rm -rf %s " % (gpm_dir)
            #print cmd
            #execute(cmd)
        else:
            cmd = "rm -rf %s %s %s %s %s %s %s %s %s %s %s %s" % (
                origFileName, origFileName_tfw, supersampled_file,
                merge_filename, topojson_filename, subset_aux_filename,
                browse_filename, subset_filename, subset_file, rgb_tif_image,
                geojsonDir, levelsDir)
            execute(cmd)
def process(gpm_dir, name, gis_file_day, ymd):
    """Global-region GPM pipeline: supersample the daily raster 2x,
    vectorize precip contours, publish TopoJSON, thumbnail and tif.
    Returns early when the gzipped topojson already exists and not forced.

    NOTE(review): `s3_bucket` and `s3_folder` are read as free names --
    presumably module globals; `levels` and `hexColors` are declared global.
    """
    global force, verbose, levels, hexColors
    regionName = 'global'
    region_dir = os.path.join(gpm_dir, regionName)
    if not os.path.exists(region_dir):
        os.makedirs(region_dir)
    origFileName = os.path.join(gpm_dir, gis_file_day)
    if not os.path.exists(origFileName):
        print "File does not exist", origFileName
        return
    # Derive the full-raster bbox from the geotransform.
    ds = gdal.Open(origFileName)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pixelsize = geotransform[1]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    ymax = yorg - geotransform[1] * ds.RasterYSize
    bbox = [xorg, ymax, xmax, yorg]
    geojsonDir = os.path.join(region_dir, "geojson_%s" % (name))
    levelsDir = os.path.join(region_dir, "levels_%s" % (name))
    origFileName_tfw = origFileName.replace(".tif", ".tfw")
    supersampled_file = os.path.join(region_dir, "%s.%s_x2.tif" % (name, ymd))
    merge_filename = os.path.join(geojsonDir, "%s.%s.geojson" % (name, ymd))
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "%s.%s.topojson" % (name, ymd))
    topojson_gz_filename = os.path.join(region_dir,
                                        "%s.%s.topojson.gz" % (name, ymd))
    browse_filename = os.path.join(geojsonDir, "..",
                                   "%s.%s_browse.tif" % (name, ymd))
    subset_aux_filename = os.path.join(
        geojsonDir, "..", "%s.%s_small_browse.tif.aux.xml" % (name, ymd))
    subset_filename = os.path.join(geojsonDir, "..",
                                   "%s.%s_small_browse.tif" % (name, ymd))
    osm_bg_image = os.path.join(config.data_dir, "gpm", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..",
                                "%s.%s_thn.png" % (name, ymd))
    tif_image = os.path.join(geojsonDir, "..", "%s.%s.tif" % (name, ymd))
    rgb_tif_image = os.path.join(geojsonDir, "..",
                                 "%s.%s.rgb.tif" % (name, ymd))
    geojson_filename = os.path.join(geojsonDir, "..",
                                    "%s.%s.json" % (name, ymd))
    # Skip entirely when the final product already exists.
    if not force and os.path.exists(topojson_gz_filename):
        print "return Found", topojson_gz_filename
        return
    print "Processing", gis_file_day, topojson_gz_filename
    # Supersample 2x with square pixels over the full extent.
    if force or not os.path.exists(supersampled_file):
        cmd = "gdalwarp -overwrite -q -tr %f %f -te %f %f %f %f -co COMPRESS=LZW %s %s" % (
            pixelsize / 2, pixelsize / 2, bbox[0], bbox[1], bbox[2], bbox[3],
            origFileName, supersampled_file)
        execute(cmd)
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    # Data in file is multiplied by 10 and is in mm.
    # Debug-only colorized tif (verbose runs only; color_file is only
    # bound in this branch).
    if verbose:
        switch(name)
        color_file = color_table(name)
        if force or (verbose and not os.path.exists(rgb_tif_image)):
            cmd = "gdaldem color-relief -q -alpha -of GTiff %s %s %s" % (
                supersampled_file, color_file, rgb_tif_image)
            execute(cmd)
    ds = gdal.Open(supersampled_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    data[data > 9000] = 0  # No value
    sdata = data / 10  # back to mm
    if force or not os.path.exists(topojson_filename + ".gz"):
        for idx, l in enumerate(levels):
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, sdata, "precip")
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "precip_level_%d.geojson" % l)
            if os.path.exists(fileName):
                with open(fileName) as data_file:
                    jdata = json.load(data_file)
                if 'features' in jdata:
                    for f in jdata['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        quiet = " > /dev/null 2>&1"
        # Convert to topojson
        cmd = "topojson --no-stitch-poles --bbox -p precip -o " + topojson_filename + " " + merge_filename + quiet
        execute(cmd)
        cmd = "gzip -f --keep " + topojson_filename
        execute(cmd)

    # problem is that we need to scale it or adjust the levels for
    # coloring (easier): browse levels are in raw units (mm * 10).
    def scale(x):
        return x * 10

    adjusted_levels = map(scale, levels)
    if not os.path.exists(osm_bg_image):
        if verbose:
            print "wms", ymax, xorg, yorg, xmax, osm_bg_image
        wms(yorg, xorg, ymax, xmax, osm_bg_image)
    zoom = 2
    if force or not os.path.exists(sw_osm_image):
        rColors = list(reversed(hexColors))
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, adjusted_levels, rColors, force,
                        verbose, zoom)
    if force or not os.path.exists(tif_image):
        cmd = "gdalwarp -overwrite -q -co COMPRESS=LZW %s %s" % (origFileName,
                                                                 tif_image)
        execute(cmd)
    ds = None
    file_list = [sw_osm_image, topojson_filename + ".gz", tif_image]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
    if not verbose:
        # Cleanup
        if config.USING_AWS_S3_FOR_STORAGE:
            cmd = "rm -rf %s " % (gpm_dir)
            execute(cmd)
        else:
            cmd = "rm -rf %s %s %s %s %s %s %s %s %s %s" % (
                origFileName, origFileName_tfw, supersampled_file,
                merge_filename, topojson_filename, subset_aux_filename,
                browse_filename, subset_filename, geojsonDir, levelsDir)
            execute(cmd)
def process(gpm_dir, name, gis_file_day, ymd, regionName, region, s3_bucket, s3_folder):
    """Build regional GPM precipitation products for one day and upload to S3.

    Subsets the daily GIS GeoTIFF to the region bbox, vectorizes it into one
    geojson per precipitation level, merges the levels into a gzipped
    topojson, renders a browse thumbnail over an OSM background, then copies
    the products to S3 and (when not verbose) removes intermediates.

    Relies on module globals: force, verbose, and helpers execute(),
    CreateLevel(), wms(), MakeBrowseImage(), CopyToS3().
    """
    # subset the file for that region
    bbox = region['bbox']
    gis_file = os.path.join(gpm_dir, gis_file_day)
    if not os.path.exists(gis_file):
        print "gis file does not exist", gis_file
        sys.exit(-1)
    region_dir = os.path.join(gpm_dir, regionName)
    if not os.path.exists(region_dir):
        os.makedirs(region_dir)
    subset_file = os.path.join(region_dir, "%s.%s.tif" % (name, ymd))
    if force or not os.path.exists(subset_file):
        # Crop the global file to the region bounding box
        cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (
            bbox[0], bbox[1], bbox[2], bbox[3], gis_file, subset_file)
        execute(cmd)
    geojsonDir = os.path.join(region_dir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(region_dir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    # Output file names (products land one level above the geojson dir)
    merge_filename = os.path.join(geojsonDir, "%s.%s.geojson" % (name, ymd))
    topojson_filename = os.path.join(geojsonDir, "..", "%s.%s.topojson" % (name, ymd))
    browse_filename = os.path.join(geojsonDir, "..", "%s.%s_browse.tif" % (name, ymd))
    subset_filename = os.path.join(geojsonDir, "..", "%s.%s_small_browse.tif" % (name, ymd))
    subset_aux_filename = os.path.join(
        geojsonDir, "..", "%s.%s_small_browse.tif.aux" % (name, ymd))
    osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "%s.%s_thn.jpg" % (name, ymd))
    # Precipitation thresholds (file values are mm * 10 before the /= 10 below)
    levels = [377, 233, 144, 89, 55, 34, 21, 13, 8, 5, 3]
    # From http://colorbrewer2.org/
    hexColors = ["#f7fcf0", "#e0f3db", "#ccebc5", "#a8ddb5", "#7bccc4",
                 "#4eb3d3", "#2b8cbe", "#0868ac", "#084081", "#810F7C", "#4D004A"]
    ds = gdal.Open(subset_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pres = geotransform[1]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    # NOTE(review): uses geotransform[1] (x resolution) for the y extent;
    # correct only when pixels are square (geotransform[5] == -geotransform[1]).
    ymax = yorg - geotransform[1] * ds.RasterYSize
    data /= 10  # back to mm
    if force or not os.path.exists(topojson_filename + ".gz"):
        # One polygon layer per threshold
        for l in levels:
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "precip", force, verbose)
        # Merge per-level geojson (highest threshold first) into one collection;
        # note `data` is re-bound to json content here (raster no longer needed).
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "precip_level_%d.geojson" % l)
            if os.path.exists(fileName):
                if verbose:
                    print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        if verbose:
            output = " "
        else:
            output = " > /dev/null 2>&1"
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename + output
        execute(cmd)
        # gzip without --keep: the uncompressed topojson is replaced by the .gz
        cmd = "gzip -f " + topojson_filename
        execute(cmd)
    # problem is that we need to scale it or adjust the levels for coloring (easier)
    adjusted_levels = [3770, 2330, 1440, 890, 550, 340, 210, 130, 80, 50, 30]
    zoom = region['thn_zoom']
    if not os.path.exists(osm_bg_image):
        ullat = yorg
        ullon = xorg
        lrlat = ymax
        lrlon = xmax
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)
    if force or not os.path.exists(sw_osm_image):
        # MakeBrowseImage expects colors dark-to-light, hence the reversal
        rColors = list(reversed(hexColors))
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, adjusted_levels, rColors, force, verbose, zoom)
    ds = None
    file_list = [sw_osm_image, topojson_filename + ".gz", subset_file]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
    if not verbose:
        # Cleanup
        cmd = "rm -rf %s %s %s %s %s %s" % (
            osm_bg_image, browse_filename, subset_filename,
            subset_aux_filename, geojsonDir, levelsDir)
        execute(cmd)
def process_viirs_chla_file( mydir, regionName, viirs_filename, s3_bucket, s3_folder): print "Processing", viirs_filename+":chlor_a" region = config.regions[regionName] bbox = region['bbox'] rdir = os.path.join(mydir, regionName) if not os.path.exists(rdir): os.makedirs(rdir) geojsonDir = os.path.join(rdir,"geojson") if not os.path.exists(geojsonDir): os.makedirs(geojsonDir) levelsDir = os.path.join(rdir,"levels") if not os.path.exists(levelsDir): os.makedirs(levelsDir) shpDir = os.path.join(rdir,"shp") if not os.path.exists(shpDir): os.makedirs(shpDir) subset_file = os.path.join(rdir, "viirs_chla.%s.tif" % ymd) super_subset_file = os.path.join(rdir, "viirs_chla_super.%s.tif" % ymd) merge_filename = os.path.join(geojsonDir, "viirs_chla.%s.geojson" % ymd) topojson_filename = os.path.join(geojsonDir, "..", "viirs_chla.%s.topojson" % ymd) browse_filename = os.path.join(geojsonDir, "..", "viirs_chla.%s_browse.tif" % ymd) subset_filename = os.path.join(geojsonDir, "..", "viirs_chla.%s_small_browse.tif" % ymd) osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png") sw_osm_image = os.path.join(geojsonDir, "..", "viirs_chla.%s_thn.jpg" % ymd) shp_filename = os.path.join(rdir, "viirs_chla.%s.shp.gz" % (ymd)) json_filename = os.path.join(geojsonDir, "viirs_chla.%s.json" % (ymd)) if force or not os.path.exists(subset_file): cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (bbox[0], bbox[1], bbox[2], bbox[3], viirs_filename, subset_file) execute(cmd) ds = gdal.Open( subset_file ) geotransform = ds.GetGeoTransform() px = geotransform[1] / 10 py = geotransform[5] / 10 ds = None # upsample and convolve if force or not os.path.exists(super_subset_file): cmd = "gdalwarp -overwrite -q -r cubicspline -tr %s %s -te %f %f %f %f -co COMPRESS=LZW %s %s" % (str(px), str(py), bbox[0], bbox[1], bbox[2], bbox[3], subset_file, super_subset_file) execute(cmd) levels = [350, 100, 50, 30, 20, 15, 10, 5, 3, 1] # From http://colorbrewer2.org/ hexColors = ["#5e4fa2", "#3288bd", "#66c2a5", 
"#abdda4", "#e6f598", "#fee08b", "#fdae61", "#f46d43", "#d53e4f", "#9e0142"] ds = gdal.Open( super_subset_file ) band = ds.GetRasterBand(1) data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize ) data *= 100 if force or not os.path.exists(topojson_filename+".gz"): for l in levels: fileName = os.path.join(levelsDir, ymd+"_level_%d.tif"%l) CreateLevel(l, geojsonDir, fileName, ds, data, "viirs_chla", force, verbose) jsonDict = dict(type='FeatureCollection', features=[]) for l in reversed(levels): fileName = os.path.join(geojsonDir, "viirs_chla_level_%d.geojson"%l) if os.path.exists(fileName): print "merge", fileName with open(fileName) as data_file: data = json.load(data_file) if 'features' in data: for f in data['features']: jsonDict['features'].append(f) with open(merge_filename, 'w') as outfile: json.dump(jsonDict, outfile) # Convert to topojson cmd = "topojson -p -o "+ topojson_filename + " " + merge_filename execute(cmd) cmd = "gzip --keep "+ topojson_filename execute(cmd) # Create shapefile gz if force or not os.path.exists(shp_filename): # Convert simplified topojson to geojson cmd = "topojson-geojson --precision 5 %s -o %s" % (topojson_filename, geojsonDir) execute(cmd) cmd = "ogr2ogr -f 'ESRI Shapefile' %s %s" % (shpDir, json_filename) execute(cmd) cmd = "cd %s; tar -zcvf %s %s" % (rdir, shp_filename, shpDir) execute(cmd) if force or not .path.exists(sw_osm_image): zoom = region['thn_zoom'] scale = 100 MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image, sw_osm_image, levels, hexColors, force, verbose, zoom, scale) ds = None file_list = [ sw_osm_image, topojson_filename, topojson_filename+".gz", subset_file, shp_filename ] CopyToS3( s3_bucket, s3_folder, file_list, force, verbose )
mapbox_image(centerlat, centerlon, zoom, rasterXSize, rasterYSize, osm_bg_image) ullon, ullat, lrlon, lrlat = browseimage.Gen_bbox(centerlat, centerlon, zoom, rasterXSize, rasterYSize) dx = (lrlon-ullon)/rasterXSize dy = (ullat-lrlat)/rasterXSize im = Image.open(osm_bg_image) draw = ImageDraw.Draw(im) for f in features : coordinates = f['geometry']['coordinates'] lon = coordinates[0] lat = coordinates[1] x = int((lon-ullon)/dx) y = int((ullat-lat)/dx) draw.ellipse( [(x-1,y-1),(x+1,y+1)]) im.save(tif_filename, "PNG") cmd = "cp %s %s" % (tif_filename, thn_image) execute(cmd) file_list = [ geojson_filename, geojsongz_filename, thn_image ] CopyToS3( s3_bucket, s3_folder, file_list, force, verbose )
def process(mydir, gis_file, r, region, s3_bucket, s3_folder, ymd):
    """Build Vegetation Health Index (VHI) products for one region/day.

    Subsets the source GeoTIFF to the region bbox, upsamples it 10x,
    vectorizes it into banded geojson (each band bounded by the next level),
    merges into a gzipped topojson, renders a browse thumbnail and uploads
    products to S3.

    Relies on module globals: force, verbose, and helpers execute(),
    CreateLevel(), MakeBrowseImage(), CopyToS3().
    """
    # subset the file for that region
    bbox = region['bbox']
    gis_file = os.path.join(mydir, "..", gis_file)
    local_dir = os.path.join(mydir, r)
    if not os.path.exists(local_dir):
        os.makedirs(local_dir)
    subset_file = os.path.join(local_dir, "vhi.%s.tif" % ymd)
    if force or not os.path.exists(subset_file):
        cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (
            bbox[0], bbox[1], bbox[2], bbox[3], gis_file, subset_file)
        execute(cmd)
    # Read native resolution so the upsample below is exactly 10x
    ds = gdal.Open(subset_file)
    geotransform = ds.GetGeoTransform()
    px = geotransform[1] / 10
    py = geotransform[5] / 10
    ds = None
    # upsample and convolve (always regenerated — no force/exists guard here)
    super_subset_file = os.path.join(local_dir, "vhi_super.%s.tif" % ymd)
    cmd = "gdalwarp -overwrite -q -r cubicspline -tr %s %s -te %f %f %f %f -co COMPRESS=LZW %s %s" % (
        str(px), str(py), bbox[0], bbox[1], bbox[2], bbox[3],
        subset_file, super_subset_file)
    execute(cmd)
    geojsonDir = os.path.join(local_dir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(local_dir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    merge_filename = os.path.join(geojsonDir, "vhi.%s.geojson" % ymd)
    topojson_filename = os.path.join(geojsonDir, "..", "vhi.%s.topojson" % ymd)
    browse_filename = os.path.join(geojsonDir, "..", "vhi.%s_browse.tif" % ymd)
    subset_filename = os.path.join(geojsonDir, "..", "vhi.%s_small_browse.tif" % ymd)
    osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "vhi.%s_thn.jpg" % ymd)
    # VHI band edges, descending; 0 acts as a closing bound for the last band
    levels = [100, 84, 72, 60, 48, 36, 24, 12, 6, 0]
    # From http://colorbrewer2.org/
    hexColors = ["#d53e4f", "#f46d43", "#fdae61", "#fee08b", "#ffffbf",
                 "#e6f598", "#abdda4", "#66c2a5", "#3288bd"]
    ds = gdal.Open(super_subset_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    data[data > 100] = 0  # values above 100 are no-data for VHI
    if force or not os.path.exists(topojson_filename + ".gz"):
        # Each band is bounded below by the NEXT level, so the last entry (0)
        # is never vectorized on its own.
        for idx, l in enumerate(levels):
            print "level", idx
            if idx < len(levels) - 1:
                fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
                CreateLevel(l, levels[idx + 1], geojsonDir, fileName, ds, data,
                            "vhi", force, verbose)
        # Merge per-level geojson into one collection; `data` is re-bound to
        # json content here (raster no longer needed)
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "vhi_level_%d.geojson" % l)
            if os.path.exists(fileName):
                print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename
        execute(cmd)
        cmd = "gzip --keep " + topojson_filename
        execute(cmd)
    zoom = region['thn_zoom']
    if force or not os.path.exists(sw_osm_image):
        # Drop the trailing 0 bound so levels pairs up with the 9 colors
        levels.pop()
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, hexColors, force, verbose, zoom)
    ds = None
    file_list = [
        sw_osm_image, topojson_filename, topojson_filename + ".gz", subset_file
    ]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def process(gpm_dir, name, gis_file_day, ymd ):
    """Build GLOBAL GPM precipitation products for one day and upload to S3.

    Supersamples the global GeoTIFF 2x, vectorizes it into per-level geojson,
    merges into a gzipped topojson, converts to a zipped shapefile, renders a
    browse thumbnail, and uploads products.

    NOTE(review): CopyToS3 uses s3_bucket / s3_folder which are not parameters
    here — presumably module globals set by the caller; verify before reuse.
    Relies on module globals: force, verbose, config, and helpers execute(),
    CreateLevel(), MakeBrowseImage(), CopyToS3().
    """
    global force, verbose
    regionName = 'global'
    region_dir = os.path.join(gpm_dir,regionName)
    if not os.path.exists(region_dir):
        os.makedirs(region_dir)
    origFileName = os.path.join(gpm_dir,gis_file_day)
    # Derive the global bbox from the source geotransform
    ds = gdal.Open(origFileName)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pixelsize = geotransform[1]
    xmax = xorg + geotransform[1]* ds.RasterXSize
    # NOTE(review): assumes square pixels (geotransform[5] == -geotransform[1])
    ymax = yorg - geotransform[1]* ds.RasterYSize
    bbox = [xorg, ymax, xmax, yorg]
    supersampled_file = os.path.join(region_dir, "%s.%s_x2.tif" % (name, ymd))
    if force or not os.path.exists(supersampled_file):
        # 2x supersample smooths the vectorized contours
        cmd = "gdalwarp -overwrite -q -tr %f %f -te %f %f %f %f -r cubicspline -co COMPRESS=LZW %s %s"%(
            pixelsize/2, pixelsize/2, bbox[0], bbox[1], bbox[2], bbox[3],
            origFileName, supersampled_file)
        execute(cmd)
    geojsonDir = os.path.join(region_dir,"geojson_%s" % (name))
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(region_dir,"levels_%s" % (name))
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    # Shapefile dir is always rebuilt from scratch
    shpDir = os.path.join(region_dir,"shp_%s" % (name))
    cmd = "rm -rf " + shpDir
    execute(cmd)
    os.makedirs(shpDir)
    merge_filename = os.path.join(geojsonDir, "%s.%s.geojson" % (name, ymd))
    topojson_filename = os.path.join(geojsonDir, "..", "%s.%s.topojson" % (name,ymd))
    browse_filename = os.path.join(geojsonDir, "..", "%s.%s_browse.tif" % (name,ymd))
    subset_aux_filename = os.path.join(geojsonDir, "..", "%s.%s_small_browse.tif.aux.xml" % (name, ymd))
    subset_filename = os.path.join(geojsonDir, "..", "%s.%s_small_browse.tif" % (name, ymd))
    #osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    # Shared OSM background under the gpm data dir (see cleanup note below)
    osm_bg_image = os.path.join(config.data_dir, "gpm", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "%s.%s_thn.jpg" % (name, ymd))
    tif_image = os.path.join(geojsonDir, "..", "%s.%s.tif" % (name, ymd))
    geojson_filename = os.path.join(geojsonDir, "..", "%s.%s.json" % (name,ymd))
    shapefile_gz = os.path.join(geojsonDir, "..", "%s.shp.gz" % name)
    shp_zip_file = os.path.join(geojsonDir, "..", "%s.shp.zip" % name)
    # Precipitation thresholds (file values are mm * 10)
    levels = [377, 233, 144, 89, 55, 34, 21, 13, 8, 5, 3, 2]
    # http://hclwizard.org/hcl-color-scheme/
    # http://vis4.net/blog/posts/avoid-equidistant-hsv-colors/
    # from http://tristen.ca/hcl-picker/#/hlc/12/1/241824/55FEFF
    # Light to dark
    hexColors = [ "#56F6FC","#58DEEE","#5BC6DE","#5EAFCC","#5E99B8","#5D84A3","#596F8D","#535B77","#4A4861","#3F374B","#322737","#241824"]
    ds = gdal.Open( supersampled_file )
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize )
    sdata = data/10  # back to mm
    if force or not os.path.exists(topojson_filename+".gz"):
        for idx, l in enumerate(levels):
            print "level", idx
            #if idx < len(levels)-1:
            fileName = os.path.join(levelsDir, ymd+"_level_%d.tif"%l)
            #CreateLevel(l, levels[idx+1], geojsonDir, fileName, ds, sdata, "precip")
            CreateLevel(l, geojsonDir, fileName, ds, sdata, "precip")
        # Merge per-level geojson (highest threshold first)
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "precip_level_%d.geojson"%l)
            if os.path.exists(fileName):
                with open(fileName) as data_file:
                    jdata = json.load(data_file)
                if 'features' in jdata:
                    for f in jdata['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        # Convert to topojson
        cmd = "topojson --bbox -p precip -o "+ topojson_filename + " " + merge_filename
        execute(cmd)
        cmd = "gzip --keep "+ topojson_filename
        execute(cmd)
    # Convert to shapefile
    if 1: #and os.path.exists(merge_filename):
        cmd= "ogr2ogr -f 'ESRI Shapefile' %s %s" % ( shpDir, merge_filename)
        execute(cmd)
    if force or not os.path.exists(shp_zip_file):
        #cmd = "cd %s; tar -cvzf %s shp" %(region_dir, shapefile_gz)
        cmd = "cd %s; zip %s shp_%s/*" %(region_dir, shp_zip_file, name)
        execute(cmd)
    # problem is that we need to scale it or adjust the levels for coloring (easier)
    adjusted_levels = [3770, 2330, 1440, 890, 550, 340, 210, 130, 80, 50, 30, 20]
    zoom = 1
    if force or not os.path.exists(sw_osm_image):
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, adjusted_levels, hexColors, force, verbose, zoom)
    if force or not os.path.exists(tif_image):
        cmd = "gdalwarp -overwrite -q -co COMPRESS=LZW %s %s"%( origFileName, tif_image)
        execute(cmd)
    ds = None
    file_list = [ sw_osm_image, topojson_filename+".gz", tif_image, shp_zip_file ]
    #CopyToS3( s3_bucket, s3_folder, file_list, force, verbose )
    # force/verbose hard-coded to 1 here (always overwrite, always log)
    CopyToS3( s3_bucket, s3_folder, file_list, 1, 1 )
    if not verbose:
        # Cleanup
        # NOTE(review): this also deletes osm_bg_image, which lives in the
        # shared config.data_dir/gpm directory — confirm that is intended.
        cmd = "rm -rf %s %s %s %s %s %s %s %s %s %s %s" % (
            origFileName, supersampled_file, merge_filename, topojson_filename,
            subset_aux_filename, browse_filename, subset_filename,
            osm_bg_image, geojsonDir, levelsDir, shpDir)
        execute(cmd)
def process_file(filename, product, variable, wrf_mode, dt, region, hours, s3_bucket, s3_folder):
    """Accumulate a WRF GRIB variable over `hours`, rasterize it, vectorize it
    into leveled topojson, render a browse image and upload products to S3.

    NOTE(review): the `dt` parameter is re-bound inside the GRIB loop to the
    field's "from YYYYMMDDHH" timestamp; later filenames built from `dt`
    therefore depend on whether the GRIB-extraction branch ran — confirm
    this is intended.  `wrf_mode` is unused in this function.
    Relies on module globals: force, verbose, and helpers execute(),
    CreateLevel(), wms(), MakeBrowseImage(), CopyToS3().
    """
    if verbose:
        print "processing: " + filename + " product:", product, " variable:" + variable
    zoom = region['thn_zoom']
    fpathname = os.path.dirname(filename)
    fdir = os.path.join(fpathname, dt, product)
    if not os.path.exists(fdir):
        os.makedirs(fdir)
    if force:
        # Wipe previous outputs for a clean re-run
        cmd = "rm -rf " + fdir + "/*"
        execute(cmd)
    geojsonDir = os.path.join(fdir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(fdir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    # Precipitation thresholds and matching dark-to-light color ramp
    levels = [377, 233, 144, 89, 55, 34, 21, 13, 8, 5, 3]
    colors = [ "#4D004A", "#810F7C", "#084081", "#0868ac", "#2b8cbe", "#4eb3d3", "#7bccc4", "#a8ddb5", "#ccebc5", "#e0f3db", "#f7fcf0" ]
    output_file = os.path.join(fdir, "%s.%s_MERC.tif" % (product, dt))
    flipped_file = os.path.join(fdir, "%s.%s_MERC_flipped.tif" % (product, dt))
    supersamp_file = os.path.join(fdir, "%s.%s_MERC_flipped_100.tif" % (product, dt))
    reproj_file = os.path.join(fdir, "%s.%s.tif" % (product, dt))
    reproj_rgb_file = os.path.join(fdir, "%s.%s_rgb.tif" % (product, dt))
    color_file = os.path.join("cluts", "wrf_colors.txt")
    merge_filename = os.path.join(geojsonDir, "%s.%s.geojson" % (product, dt))
    topojson_filename = os.path.join(geojsonDir, "..", "%s.%s.topojson" % (product, dt))
    browse_filename = os.path.join(geojsonDir, "..", "%s.%s_browse.tif" % (product, dt))
    subset_filename = os.path.join(geojsonDir, "..", "%s.%s_small_browse.tif" % (product, dt))
    subset_aux_filename = os.path.join(
        geojsonDir, "..", "%s.%s_small_browse.tif.aux" % (product, dt))
    osm_bg_image = os.path.join(fdir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..", "%s.%s_thn.jpg" % (product, dt))
    if force or not os.path.exists(output_file):
        # Sum the accumulated-rate fields over the requested forecast hours
        grbs = pygrib.open(filename)
        grbvars = grbs.select(name=variable)
        count = 0
        total = []
        for grbvar in grbvars:
            # Parse metadata out of the GRIB record's string form, e.g.:
            # 1090:Total Precipitation:kg m**-2 (instant):mercator:surface:level 0:fcst time 1 hrs:from 201604241200
            grbstr = "%s" % grbvar
            arr = grbstr.split(':')
            id = arr[0]
            name = arr[1]
            rate = arr[2]
            fcst = arr[6]
            dt = arr[7].replace('from ', '')  # re-binds the `dt` parameter
            if (rate.find('accum') > 0):
                count += 1
                print id, name, rate, fcst, dt
                if count == 1:
                    total = grbvar['values']
                else:
                    total += grbvar['values']
            #lats, lons = p.latlons()
            if count > hours:
                break
        grbs.close()
        # Get critical info from GRIB file
        ds = gdal.Open(filename)
        ncols = ds.RasterXSize
        nrows = ds.RasterYSize
        geotransform = ds.GetGeoTransform()
        projection = ds.GetProjection()
        # Create gtif
        driver = gdal.GetDriverByName("GTiff")
        dst_ds = driver.Create(output_file, ncols, nrows, 1, gdal.GDT_Float32)
        band = dst_ds.GetRasterBand(1)
        band.WriteArray(total, 0, 0)
        dst_ds.SetGeoTransform(geotransform)
        dst_ds.SetProjection(projection)
        dst_ds = None
        ds = None
    # Flip it since it is bottom up
    if force or not os.path.exists(flipped_file):
        cmd = "flip_raster.py -o %s %s" % (flipped_file, output_file)
        execute(cmd)
    # Reproject to EPSG:4326
    if force or not os.path.exists(reproj_file):
        cmd = "gdalwarp -q -t_srs EPSG:4326 " + flipped_file + " " + reproj_file
        execute(cmd)
    ds = gdal.Open(reproj_file)
    geotransform = ds.GetGeoTransform()
    pixelsize = geotransform[1]
    ds = None
    # Supersample and interpolate to make it output smoother
    if force or not os.path.exists(supersamp_file):
        cmd = "gdalwarp -q -multi -tr %f %f -r cubicspline %s %s" % (
            pixelsize / 10, pixelsize / 10, reproj_file, supersamp_file)
        execute(cmd)
    # Color it using colormap
    # NOTE(review): guarded by `verbose`, not `force` — the RGB file is a
    # debugging aid only; confirm this guard is intended.
    if verbose and not os.path.exists(reproj_rgb_file):
        cmd = "gdaldem color-relief -q -alpha " + supersamp_file + " " + color_file + " " + reproj_rgb_file
        execute(cmd)
    ds = gdal.Open(supersamp_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pres = geotransform[1]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    # NOTE(review): assumes square pixels (geotransform[5] == -geotransform[1])
    ymax = yorg - geotransform[1] * ds.RasterYSize
    if force or not os.path.exists(topojson_filename + ".gz"):
        # One polygon layer per threshold
        for l in levels:
            fileName = os.path.join(levelsDir, dt + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "precip", force, verbose)
        # Merge per-level geojson; `data` is re-bound to json content here
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "precip_level_%d.geojson" % l)
            if os.path.exists(fileName):
                if verbose:
                    print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)
        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)
        if verbose:
            output = " "
        else:
            output = " > /dev/null 2>&1"
        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename + output
        execute(cmd)
        # keep the uncompressed topojson only in verbose (debug) runs
        if verbose:
            cmd = "gzip --keep -f " + topojson_filename
        else:
            cmd = "gzip -f " + topojson_filename
        execute(cmd)
    if not os.path.exists(osm_bg_image):
        ullat = yorg
        ullon = xorg
        lrlat = ymax
        lrlon = xmax
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)
    if force or not os.path.exists(sw_osm_image):
        scale = 2
        MakeBrowseImage(ds, browse_filename, supersamp_file, osm_bg_image,
                        sw_osm_image, levels, colors, force, verbose, zoom, scale)
    ds = None
    file_list = [sw_osm_image, topojson_filename + ".gz", reproj_file]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
    if not verbose:
        # Cleanup
        cmd = "rm -rf %s %s %s %s %s %s %s %s" % (
            flipped_file, output_file, supersamp_file, browse_filename,
            subset_filename, subset_aux_filename, geojsonDir, levelsDir)
        execute(cmd)
def process(swe_dir, scene, s3_bucket, s3_folder): fullName = os.path.join(swe_dir, scene+".tif") if not os.path.exists(fullName): print "File does not exist", fullName sys.exit(-1) if verbose: print "Processing", fullName geojsonDir = os.path.join(swe_dir,"geojson") if not os.path.exists(geojsonDir): os.makedirs(geojsonDir) levelsDir = os.path.join(swe_dir,"levels") if not os.path.exists(levelsDir): os.makedirs(levelsDir) merge_filename = os.path.join(geojsonDir, "%s_levels.geojson" % scene) topojson_filename = os.path.join(geojsonDir, "..", "%s_levels.topojson" % scene) browse_filename = os.path.join(geojsonDir, "..", "%s_browse.tif" % scene) subset_filename = os.path.join(geojsonDir, "..", "%s_small_browse.tif" % scene) osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png") sw_osm_image = os.path.join(geojsonDir, "..", "%s_thn.jpg" % scene) levels = [144, 89, 55, 34, 21, 13, 8, 5, 3] # From http://colorbrewer2.org/ hexColors = ["#d73027","#f46d43","#fdae61", "#fee08b", "#ffffbf","#d9ef8b","#a6d96a","#66bd63","#1a9850"] ds = gdal.Open( fullName ) band = ds.GetRasterBand(1) data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize ) if force or not os.path.exists(topojson_filename+".gz"): for l in levels: fileName = os.path.join(levelsDir, scene+"_level_%d.tif"%l) CreateLevel(l, geojsonDir, fileName, ds, data, "soil_moisture", force,verbose) jsonDict = dict(type='FeatureCollection', features=[]) for l in reversed(levels): fileName = os.path.join(geojsonDir, "soil_moisture_level_%d.geojson"%l) if os.path.exists(fileName): print "merge", fileName with open(fileName) as data_file: data = json.load(data_file) if 'features' in data: for f in data['features']: jsonDict['features'].append(f) with open(merge_filename, 'w') as outfile: json.dump(jsonDict, outfile) # Convert to topojson cmd = "topojson -p -o "+ topojson_filename + " " + merge_filename execute(cmd) cmd = "gzip --keep "+ topojson_filename execute(cmd) if force or not os.path.exists(sw_osm_image): 
MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image, sw_osm_image,levels, hexColors, force, verbose) ds = None file_list = [ sw_osm_image, topojson_filename, topojson_filename+".gz", fullName ] CopyToS3( s3_bucket, s3_folder, file_list, force, verbose )
def process_trmm_region( self, dx ):
    """Produce TRMM 24-hour precipitation products for region `dx` and upload.

    Subsets/upsamples the 180-degree output file, builds a thumbnail and a
    gzipped topojson, merges the per-region trmm files via node, uploads the
    products to the region's S3 bucket, then cleans up.

    Uses module globals force/verbose/config and instance state: self.ymd,
    self.year, self.doy, self.output_file_180, self.color_file.
    """
    region = config.regions[dx]
    bbox = region['bbox']
    tzoom = region['tiles-zoom']
    pixelsize = region['pixelsize']
    thn_width = region['thn_width']
    thn_height = region['thn_height']
    bucketName = region['bucket']
    if verbose:
        print "process_trmm_region:", dx, pixelsize
    # Intermediate and product paths under <data_dir>/trmm/<region>/<ymd>
    static_file = os.path.join(config.data_dir,"trmm", dx, "%s_static.tiff" % (dx))
    rgb_subset_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_rgb.tif" % (dx,self.ymd))
    resampled_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_1km.tif" % (dx,self.ymd))
    resampled_rgb_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_1km_rgb.tif" % (dx,self.ymd))
    supersampled_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_100m.tif" % (dx,self.ymd))
    supersampled_rgb_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_100m_rgb.tif" % (dx,self.ymd))
    shp_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_1km.shp" % (dx,self.ymd))
    geojson_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24_%s_%s_1km.geojson" % (dx,self.ymd))
    subset_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24.%s.tif" % (self.ymd))
    thumbnail_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24.%s_thn.jpg" % (self.ymd))
    topojson_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24.%s.topojson" % (self.ymd))
    topojson_gz_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24.%s.topojson.gz" % (self.ymd))
    shp_gz_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24.%s.shp.gz" % (self.ymd))
    shp_zip_file = os.path.join(config.data_dir,"trmm", dx, self.ymd, "trmm_24.%s.shp.zip" % (self.ymd))
    merge_filename = os.path.join(config.data_dir,"trmm", dx, self.ymd, "geojson", "trmm_levels.geojson")
    # NOTE(review): this unconditional subset call is repeated inside the
    # guard just below — the duplication looks accidental; confirm which
    # call is intended before removing either.
    self.process_trmm_region_subset(self.output_file_180, bbox, subset_file, self.color_file, rgb_subset_file)
    if force or not os.path.exists(resampled_file):
        self.process_trmm_region_subset(self.output_file_180, bbox, subset_file, self.color_file, rgb_subset_file)
        self.process_trmm_region_upsample(pixelsize, bbox, self.output_file_180, resampled_file, resampled_rgb_file)
    if force or not os.path.exists(thumbnail_file):
        self.process_trmm_region_thumbnail( rgb_subset_file, thn_width, thn_height, static_file, thumbnail_file)
    if force or not os.path.exists(topojson_gz_file):
        self.process_trmm_region_topojson( dx, subset_file, supersampled_file, supersampled_rgb_file, pixelsize, bbox, shp_file, geojson_file, topojson_file, topojson_gz_file )
    # merge the trmm files
    cmd = "node trmm_merge.js "+dx+ " " + self.ymd
    self.execute(cmd)
    # Convert to shapefile
    #self.shpDir = os.path.join(config.data_dir,"trmm", dx, self.ymd, "shp")
    #cmd = "rm -rf " + self.shpDir
    #self.execute(cmd)
    #cmd= "ogr2ogr -f 'ESRI Shapefile' %s %s" % ( self.shpDir, merge_filename)
    #self.execute(cmd)
    #if force or not os.path.exists(shp_gz_file):
    #    mydir = os.path.join(config.data_dir,"trmm", dx, self.ymd)
    #    cmd = "cd %s; tar -cvzf %s shp" %(mydir, shp_gz_file)
    #    self.execute(cmd)
    #if force or not os.path.exists(shp_zip_file):
    #    mydir = os.path.join(config.data_dir,"trmm", dx, self.ymd)
    #    cmd = "cd %s; zip %s shp/*" %(mydir, shp_zip_file)
    #    self.execute(cmd)
    file_list = [ thumbnail_file, topojson_gz_file, subset_file ]
    #self.process_trmm_region_to_s3( dx, file_list)
    s3_folder = os.path.join("trmm_24", str(self.year), self.doy)
    region = config.regions[dx]
    s3_bucket = region['bucket']
    CopyToS3( s3_bucket, s3_folder, file_list, force, verbose )
    self.process_trmm_region_cleanup(dx)
def process_mcd45_file(mydir, dx, file_name, s3_bucket, s3_folder):
    """Build MCD45 burned-areas products for region `dx` and upload to S3.

    Subsets the input to the region bbox, colorizes it, traces the burned
    pixels into geojson with potrace, simplifies to topojson, renders a
    browse thumbnail and uploads the products.

    Relies on module globals: ymd, force, verbose, config, and helpers
    execute(), MakeBrowseImage(), CopyToS3().
    """
    if verbose:
        print "Processing:" + file_name
    region = config.regions[dx]
    assert (region)
    bbox = region['bbox']
    localdir = os.path.join(mydir, dx)
    if not os.path.exists(localdir):
        os.makedirs(localdir)
    # Set file vars
    output_file = os.path.join(localdir, "burned_areas.%s_out.tif" % ymd)
    rgb_output_file = os.path.join(localdir, "burned_areas.%s_out_rgb.tif" % ymd)
    subset_file = os.path.join(localdir, "burned_areas.%s_subset.tif" % ymd)
    subset_rgb_file = os.path.join(localdir, "burned_areas.%s_subset_rgb.tif" % ymd)
    color_file = os.path.join("cluts", "mcd45_colors.txt")
    resampled_file = os.path.join(localdir, "burned_areas.%s_resampled.tif" % ymd)
    resampled_rgb_file = os.path.join(
        localdir, "burned_areas.%s_resampled_rgb.tif" % ymd)
    bmp_file = os.path.join(localdir, "burned_areas.%s.bmp" % ymd)
    geojson_file = os.path.join(localdir, "burned_areas.%s.geojson" % ymd)
    topojson_file = os.path.join(localdir, "burned_areas.%s.topojson" % ymd)
    topojsongz_file = os.path.join(localdir, "burned_areas.%s.topojson.gz" % ymd)
    sw_osm_image = os.path.join(localdir, "burned_areas.%s_thn.jpg" % ymd)
    osm_bg_image = os.path.join(localdir, "osm_bg.png")
    browse_filename = os.path.join(localdir, "burned_areas.%s_browse.tif" % ymd)
    small_browse_filename = os.path.join(
        localdir, "burned_areas.%s_small_browse.tif" % ymd)
    # subset it to our BBOX
    # use ullr
    if force or not os.path.exists(subset_file):
        # -projwin expects ulx uly lrx lry
        lonlats = "" + str(bbox[0]) + " " + str(bbox[3]) + " " + str(
            bbox[2]) + " " + str(bbox[1])
        cmd = "gdal_translate -projwin " + lonlats + " " + file_name + " " + subset_file
        execute(cmd)
    # color it using colormap
    if force or not os.path.exists(resampled_rgb_file):
        cmd = "gdaldem color-relief -alpha " + subset_file + " " + color_file + " " + resampled_rgb_file
        execute(cmd)
    # BMP of band 1 as input for potrace below
    if force or not os.path.exists(bmp_file):
        cmd = "gdal_translate -b 1 -of BMP -ot Byte %s %s" % (
            resampled_rgb_file, bmp_file)
        execute(cmd)
        execute("rm -f " + bmp_file + ".aux.xml")
    # Georeferencing info used to place the traced polygons
    ds = gdal.Open(resampled_rgb_file)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    res = geotransform[1]
    xmax = geotransform[0] + ds.RasterXSize * geotransform[1]
    ymax = geotransform[3] + ds.RasterYSize * geotransform[5]
    if force or not os.path.exists(geojson_file):
        # Trace black pixels into polygons, scaled/offset to map coordinates
        cmd = str.format(
            "potrace -z black -a 1.5 -t 1 -i -b geojson -o {0} {1} -x {2} -L {3} -B {4} ",
            geojson_file, bmp_file, res, xorg, ymax)
        execute(cmd)
    if force or not os.path.exists(topojson_file):
        cmd = str.format(
            "topojson --bbox --simplify-proportion 0.5 {0} -o {1} ",
            geojson_file, topojson_file)
        execute(cmd)
    if force or not os.path.exists(topojsongz_file):
        # compress topojson without all the directories
        cmd = str.format("gzip --keep {0} ", topojson_file)
        execute(cmd)
    zoom = region['thn_zoom']
    # Two bands: burned within the last year vs everything else
    levels = [365, 0]
    hexColors = ["#990066", "#ff0000"]
    if force or not os.path.exists(sw_osm_image):
        MakeBrowseImage(ds, browse_filename, small_browse_filename,
                        osm_bg_image, sw_osm_image, levels, hexColors,
                        force, verbose, zoom)
    file_list = [
        sw_osm_image, topojson_file, topojson_file + ".gz", subset_file
    ]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
    ds = None
def process(mydir, gis_file, regionName, region, subfolder, s3_bucket,
            s3_folder, ymd):
    """Subset a precip raster to `region`, contour it into level polygons,
    merge them to one geojson, convert to gzipped topojson, build browse
    images, and upload to S3.

    Relies on module globals: `force`, `verbose`, `levels`, `hexColors`
    (neither is assigned in this function).
    """
    # subset the file for that region
    bbox = region['bbox']
    subset_file = os.path.join(mydir, "%s.%s.tif" % (subfolder, ymd))
    print "subset_file", subset_file
    if force or not os.path.exists(subset_file):
        cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (
            bbox[0], bbox[1], bbox[2], bbox[3], gis_file, subset_file)
        execute(cmd)

    geojsonDir = os.path.join(mydir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)

    levelsDir = os.path.join(mydir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)

    merge_filename = os.path.join(geojsonDir, "%s.%s.geojson" % (subfolder, ymd))
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "%s.%s.topojson" % (subfolder, ymd))
    browse_filename = os.path.join(geojsonDir, "..",
                                   "%s.%s_browse.tif" % (subfolder, ymd))
    subset_filename = os.path.join(geojsonDir, "..",
                                   "%s.%s_small_browse.tif" % (subfolder, ymd))
    osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..",
                                "%s.%s_thn.jpg" % (subfolder, ymd))

    ds = gdal.Open(subset_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)

    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    pres = geotransform[1]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    # NOTE(review): uses geotransform[1] (x resolution) for the y extent;
    # correct only when pixels are square (geotransform[5] == -geotransform[1])
    # - confirm against the input rasters.
    ymax = yorg - geotransform[1] * ds.RasterYSize

    if force or not os.path.exists(topojson_filename + ".gz"):
        # one thresholded tif + geojson per contour level
        for l in levels:
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "precip", force,
                        verbose)

        # merge the per-level geojson files into a single FeatureCollection
        # (reversed so higher levels are appended last)
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "precip_level_%d.geojson" % l)
            if os.path.exists(fileName):
                print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)

        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)

        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename
        execute(cmd)
        cmd = "gzip --keep " + topojson_filename
        execute(cmd)

    # fetch an OSM background once per directory, sized to the raster extent
    if not os.path.exists(osm_bg_image):
        ullat = yorg
        ullon = xorg
        lrlat = ymax
        lrlon = xmax
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)

    if force or not os.path.exists(sw_osm_image):
        zoom = region['thn_zoom']
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, hexColors, force, verbose, zoom)

    ds = None
    file_list = [
        sw_osm_image, topojson_filename, topojson_filename + ".gz", subset_file
    ]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)
def process_file(mydir, filename, s3_bucket, s3_folder): print "Processing", filename geojsonDir = os.path.join(mydir, "geojson") if not os.path.exists(geojsonDir): os.makedirs(geojsonDir) levelsDir = os.path.join(mydir, "levels") if not os.path.exists(levelsDir): os.makedirs(levelsDir) shpDir = os.path.join(mydir, "shp") if not os.path.exists(shpDir): os.makedirs(shpDir) super_subset_file = os.path.join(mydir, "geos5_precip_super.%s.tif" % ymd) merge_filename = os.path.join(geojsonDir, "geos5_precip.%s.geojson" % ymd) topojson_filename = os.path.join(geojsonDir, "..", "geos5_precip.%s.topojson" % ymd) browse_filename = os.path.join(geojsonDir, "..", "geos5_precip.%s_browse.tif" % ymd) subset_filename = os.path.join(geojsonDir, "..", "geos5_precip.%s_small_browse.tif" % ymd) subset_aux_filename = os.path.join( geojsonDir, "..", "geos5_precip.%s_small_browse.tif.aux.xml" % ymd) osm_bg_image = os.path.join(geojsonDir, "..", "osm_bg.png") sw_osm_image = os.path.join(geojsonDir, "..", "geos5_precip.%s_thn.jpg" % ymd) shp_filename = os.path.join(mydir, "geos5_precip.%s.shp.gz" % (ymd)) json_filename = os.path.join(geojsonDir, "geos5_precip.%s.json" % (ymd)) shp_zip_file = os.path.join(mydir, "geos5_precip.%s.shp.zip" % (ymd)) #if force or not os.path.exists(subset_file): # cmd = "gdalwarp -overwrite -q -te %f %f %f %f %s %s" % (bbox[0], bbox[1], bbox[2], bbox[3], filename, subset_file) # execute(cmd) ds = gdal.Open(filename) geotransform = ds.GetGeoTransform() px = geotransform[1] / 5 py = geotransform[5] / 5 ds = None # upsample and convolve if force or not os.path.exists(super_subset_file): # we need to have square pixels cmd = "gdalwarp -overwrite -q -r cubicspline -tr %s %s -co COMPRESS=LZW %s %s" % ( str(px), str(px), filename, super_subset_file) execute(cmd) levels = [377, 233, 144, 89, 55, 34, 21, 13, 8, 5, 3, 2] # http://hclwizard.org/hcl-color-scheme/ # http://vis4.net/blog/posts/avoid-equidistant-hsv-colors/ # from 
http://tristen.ca/hcl-picker/#/hlc/12/1/241824/55FEFF hexColors = [ "#56F6FC", "#58DEEE", "#5BC6DE", "#5EAFCC", "#5E99B8", "#5D84A3", "#596F8D", "#535B77", "#4A4861", "#3F374B", "#322737", "#241824" ] ds = gdal.Open(super_subset_file) band = ds.GetRasterBand(1) data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize) if force or not os.path.exists(topojson_filename + ".gz"): for l in levels: fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l) CreateLevel(l, geojsonDir, fileName, ds, data, "geos5_precip", force, verbose) jsonDict = dict(type='FeatureCollection', features=[]) for l in reversed(levels): fileName = os.path.join(geojsonDir, "geos5_precip_level_%d.geojson" % l) if os.path.exists(fileName): if verbose: print "merge", fileName with open(fileName) as data_file: data = json.load(data_file) if 'features' in data: for f in data['features']: jsonDict['features'].append(f) with open(merge_filename, 'w') as outfile: json.dump(jsonDict, outfile) # Convert to topojson cmd = "topojson -p --bbox --simplify-proportion 0.5 -o " + topojson_filename + " " + merge_filename execute(cmd) cmd = "gzip --keep " + topojson_filename execute(cmd) # Create shapefile gz if force or not os.path.exists(shp_filename): # Convert simplified topojson to geojson cmd = "topojson-geojson --precision 4 %s -o %s" % (topojson_filename, geojsonDir) execute(cmd) cmd = "ogr2ogr -f 'ESRI Shapefile' %s %s" % (shpDir, json_filename) execute(cmd) #cmd = "cd %s; tar -zcvf %s %s" % (mydir, shp_filename, shpDir) cmd = "cd %s; zip %s shp/*" % (mydir, shp_zip_file) execute(cmd) if force or not os.path.exists(sw_osm_image): zoom = 1 scale = 1 MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image, sw_osm_image, levels, hexColors, force, verbose, zoom, scale) ds = None file_list = [ sw_osm_image, topojson_filename, topojson_filename + ".gz", filename, shp_zip_file ] CopyToS3(s3_bucket, s3_folder, file_list, 1, 1) if not verbose: # Cleanup cmd = "rm -rf %s %s %s %s %s %s %s %s %s 
%s" % ( merge_filename, browse_filename, topojson_filename, subset_filename, super_subset_file, osm_bg_image, subset_aux_filename, geojsonDir, levelsDir, shpDir) execute(cmd)
def process(mydir, scene, s3_bucket, s3_folder, zoom, bbox):
    """Convert a landslide-nowcast raster (``<scene>.tif``) into gzipped
    topojson plus browse/thumbnail images and upload the results to S3.

    Exits the process with -1 when the input tif is missing. Relies on
    module globals `verbose`, `force`, and (for cleanup) `config`, `region`
    and `ymd`.
    """
    global verbose, force
    fullName = os.path.join(mydir, scene + ".tif")
    if not os.path.exists(fullName):
        print "File does not exist", fullName
        sys.exit(-1)
    if verbose:
        print "Processing", fullName

    geojsonDir = os.path.join(mydir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(mydir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)
    #shpDir = os.path.join(mydir,"shp")
    #cmd = "rm -rf " + shpDir
    #execute(cmd)
    #os.makedirs(shpDir)

    merge_filename = os.path.join(geojsonDir, "%s.geojson" % scene)
    topojson_filename = os.path.join(mydir, "%s.topojson" % scene)
    browse_filename = os.path.join(mydir, "%s_browse.tif" % scene)
    subset_filename = os.path.join(mydir, "%s_small_browse.tif" % scene)
    osm_bg_image = os.path.join(mydir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(mydir, "%s_thn.jpg" % scene)
    #shapefile_gz = os.path.join(mydir, "%s.shp.gz" % scene)

    # nowcast classes: 2 and 1
    levels = [2, 1]
    # From http://colorbrewer2.org/
    #hexColors = ["#ffffe5", "#feb24c","#f03b20"]
    hexColors = ["#feb24c", "#f03b20"]

    ds = gdal.Open(fullName)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)

    # extent of the raster (xmax/ymax computed but only used for debugging)
    geotransform = ds.GetGeoTransform()
    xorg = geotransform[0]
    yorg = geotransform[3]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    ymax = yorg + geotransform[5] * ds.RasterYSize
    #print ymax, xorg, yorg, xmax

    if force or not os.path.exists(topojson_filename + ".gz"):
        # one thresholded tif + geojson per class level
        for l in levels:
            fileName = os.path.join(levelsDir, scene + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data,
                        "landslide_nowcast", force, verbose)

        # merge the per-level geojson files into one FeatureCollection
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir,
                                    "landslide_nowcast_level_%d.geojson" % l)
            if os.path.exists(fileName):
                if verbose:
                    print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)

        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)

        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename + " > /dev/null 2>&1"
        execute(cmd)
        cmd = "gzip -f --keep " + topojson_filename
        execute(cmd)

    # Convert to shapefile
    #if force or not os.path.exists(shpDir) and os.path.exists(merge_filename):
    #    cmd= "ogr2ogr -f 'ESRI Shapefile' %s %s" % ( shpDir, merge_filename)
    #    execute(cmd)
    #if force or not os.path.exists(shapefile_gz):
    #    cmd = "cd %s; tar -cvzf %s shp" %(mydir, shapefile_gz)
    #    execute(cmd)

    # fetch an OSM background once, sized to the region bbox
    if not os.path.exists(osm_bg_image):
        ullat = bbox[3]
        ullon = bbox[0]
        lrlat = bbox[1]
        lrlon = bbox[2]
        print "wms", ullat, ullon, lrlat, lrlon
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)

    if force or not os.path.exists(sw_osm_image):
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, hexColors, force, verbose, zoom)

    ds = None
    file_list = [sw_osm_image, topojson_filename + ".gz", fullName]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)

    if not verbose:
        # Cleanup intermediates; NOTE(review): `region` and `ymd` here are
        # module globals, not parameters - confirm they are set by the caller.
        cmd = "rm -f %s %s %s %s" % (subset_filename,
                                     subset_filename + ".aux.xml",
                                     browse_filename, topojson_filename)
        execute(cmd)
        #cmd = "rm -rf "+shpDir
        #execute(cmd)
        cmd = "rm -rf " + levelsDir
        execute(cmd)
        cmd = "rm -rf " + geojsonDir
        execute(cmd)
        fpath = os.path.join(config.data_dir, "landslide_nowcast", region, ymd)
        cmd = "rm -rf " + os.path.join(fpath, "iabr*")
        execute(cmd)
        cmd = "rm -rf " + os.path.join(fpath, "rr_*")
        execute(cmd)
        cmd = "rm -rf " + os.path.join(fpath, "step_*")
        execute(cmd)
def process_lowres(self):
    """Convert the raw GFMS 14 km flood binary for `ymd` into a GeoTIFF,
    supersample it 2x, contour the flood levels into gzipped topojson,
    build browse images, and upload to S3.

    Reads `ym`, `day`, `ymd`, `verbose`, `force`, `s3_bucket`, `s3_folder`
    from module globals in addition to `self.force`/`self.verbose` -
    NOTE(review): the mix of self.* and global flags looks accidental;
    confirm they are kept in sync by the caller.
    """
    name = "flood_14km"
    input_fname = "Flood_byStor_%s%02d00.bin" % (ym, day)
    input_fullname = os.path.join(self.inpath, "gfms", ymd, input_fname)
    output_fname = "%s.%s%02d.tif" % (name, ym, day)
    output_fullname = os.path.join(self.inpath, "gfms", ymd, output_fname)
    super_fname = "%s.%s%02d.x2.tif" % (name, ym, day)
    super_fullname = os.path.join(self.inpath, "gfms", ymd, super_fname)
    super_fname_rgb = "%s.%s%02d.x2.rgb.tif" % (name, ym, day)
    super_fullname_rgb = os.path.join(self.inpath, "gfms", ymd,
                                      super_fname_rgb)
    geojson_fname = "%s.%s%02d.geojson" % (name, ym, day)
    geojson_fullname = os.path.join(self.inpath, "gfms", ymd, geojson_fname)
    topojson_fname = "%s.%s%02d.topojson" % (name, ym, day)
    topojson_fullname = os.path.join(self.inpath, "gfms", ymd, topojson_fname)
    topojson_fullname_gz = topojson_fullname + ".gz"
    shp_gz_file = os.path.join(self.inpath, "gfms", ymd,
                               "%s.%s%02d.shp.gz" % (name, ym, day))
    shp_zip_file = os.path.join(self.inpath, "gfms", ymd,
                                "%s.%s%02d.shp.zip" % (name, ym, day))
    output_rgb_fname = "%s.%s%02d_rgb.tif" % (name, ym, day)
    output_rgb_fullname = os.path.join(self.inpath, "gfms", ymd,
                                       output_rgb_fname)
    color_file = os.path.join("cluts", "gfms_colors.txt")

    flood_dir = os.path.join(self.inpath, "gfms", ymd)
    geojsonDir = os.path.join(flood_dir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(flood_dir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)

    merge_filename = os.path.join(geojsonDir, "%s_levels.geojson" % ymd)
    browse_filename = os.path.join(geojsonDir, "..", "%s_browse.tif" % ymd)
    browse_aux_filename = os.path.join(geojsonDir, "..",
                                       "%s_small_browse.tif.aux.xml" % ymd)
    subset_filename = os.path.join(geojsonDir, "..",
                                   "%s_small_browse.tif" % ymd)
    osm_bg_image = os.path.join(flood_dir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(geojsonDir, "..",
                                "%s.%s%02d_thn.jpg" % (name, ym, day))

    # GFMS 14 km grid origin (top-left lon/lat) and pixel size in degrees
    x = -127.25
    y = 50
    res = 0.125

    if self.force or not os.path.exists(output_fullname):
        # fixed dimensions of the raw GFMS binary grid
        rows = 800
        cols = 2458
        size = rows * cols
        if verbose:
            print "gfms processing:", input_fullname
        # NOTE(review): fd is never closed - consider a with-block.
        fd = open(input_fullname, 'rb')
        shape = (rows, cols)
        data = numpy.fromfile(file=fd, dtype=numpy.float32,
                              count=size).reshape(shape)
        data[data < 0] = 0  #PGC  clamp negative (nodata/invalid) values
        #print "stats:", data.size, data.min(), data.mean(), data.max(), data.std()
        # Create gtif
        driver = gdal.GetDriverByName("GTiff")
        #dst_ds = driver.Create(output_fullname, cols, rows, 1, gdal.GDT_Float32)
        dst_ds = driver.Create(output_fullname, cols, rows, 1, gdal.GDT_Byte)
        # top left x, w-e pixel resolution, rotation, top left y, rotation, n-s pixel resolution
        dst_ds.SetGeoTransform([x, res, 0, y, 0, -res])
        # set the reference info
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)
        dst_ds.SetProjection(srs.ExportToWkt())
        # write the band
        band = dst_ds.GetRasterBand(1)
        #band.SetNoDataValue(-9999)
        band.WriteArray(data)
        dst_ds = None

    # Supersample it (halve the pixel size, mode resampling)
    scale = 2
    if self.force or not os.path.exists(super_fullname):
        cmd = "gdalwarp -overwrite -q -tr %f %f -r mode %s %s" % (
            res / scale, res / scale, output_fullname, super_fullname)
        self.execute(cmd)

    # Create RGB (debug aids: only generated in verbose mode)
    if self.verbose and (self.force
                         or not os.path.exists(output_rgb_fullname)):
        cmd = "gdaldem color-relief -q -alpha " + output_fullname + " " + color_file + " " + output_rgb_fullname
        self.execute(cmd)
    if self.verbose and (self.force
                         or not os.path.exists(super_fullname_rgb)):
        cmd = "gdaldem color-relief -q -alpha " + super_fullname + " " + color_file + " " + super_fullname_rgb
        self.execute(cmd)

    # json
    #if self.force or not os.path.exists(geojson_fullname):
    #    cmd = "makesurface vectorize --classfile gmfs_classes.csv --outfile %s --outvar flood %s " %( geojson_fullname, super_fullname)
    #    self.execute(cmd)
    # topojson
    #if self.force or not os.path.exists(topojson_fullname):
    #    cmd = "topojson --simplify-proportion 0.75 --bbox -p risk -o %s -- flood_24hr_forecast=%s" % (topojson_fullname, geojson_fullname )
    #    self.execute(cmd)
    #if self.force or not os.path.exists(topojson_fullname_gz):
    #    cmd = "gzip --keep %s" % (topojson_fullname)
    #    self.execute(cmd)

    # flood-depth contour levels and matching display colors
    levels = [200, 100, 50, 20, 10, 1]
    hexColors = [
        "#FF0000", "#FFA500", "#FFD700", "#0000FF", "#00BFFF", "#00FF00"
    ]

    ds = gdal.Open(super_fullname)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)

    if self.force or not os.path.exists(topojson_fullname + ".gz"):
        # one thresholded tif + geojson per flood level
        for l in levels:
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "flood", force,
                        verbose)

        # merge the per-level geojson files into one FeatureCollection
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir, "flood_level_%d.geojson" % l)
            if os.path.exists(fileName):
                # print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)

        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)

        # Convert to topojson (silence CLI output unless verbose)
        quiet = " > /dev/null 2>&1"
        if verbose:
            quiet = " "
        cmd = "topojson -p -o " + topojson_fullname + " " + merge_filename + quiet
        self.execute(cmd)
        if verbose:
            cmd = "gzip -f --keep " + topojson_fullname
        else:
            cmd = "gzip -f " + topojson_fullname
        self.execute(cmd)

    #cmd= "ogr2ogr -f 'ESRI Shapefile' %s %s" % ( shpDir, merge_filename)
    #self.execute(cmd)
    #if force or not os.path.exists(shp_zip_file):
    #    mydir = os.path.join(self.inpath, "gfms", ymd)
    #    #cmd = "cd %s; tar -cvzf %s shp" %(mydir, shp_gz_file)
    #    cmd = "cd %s; zip %s shp/*" %(mydir, shp_zip_file)
    #    self.execute(cmd)

    # fetch a global OSM background once (ul=(y,x), lr=(-y,180))
    if not os.path.exists(osm_bg_image):
        wms(y, x, -y, 180, osm_bg_image)

    if self.force or not os.path.exists(sw_osm_image):
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, hexColors, force, verbose,
                        zoom=2)

    file_list = [sw_osm_image, topojson_fullname_gz, output_fullname]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)

    if not self.verbose:
        # Cleanup intermediates
        cmd = "rm -rf %s %s %s %s %s %s %s %s" % (
            browse_filename, input_fullname, subset_filename, super_fullname,
            output_rgb_fullname, browse_aux_filename, levelsDir, geojsonDir)
        self.execute(cmd)
def process(_dir, files, ymd):
    """Build the global landslide nowcast for `ymd`.

    Steps: weighted-average the daily rainfall rasters in `files`, classify
    the result against the ARI90/ARI95 antecedent-rainfall thresholds,
    mask with the global susceptibility map, then vectorize the nowcast
    classes to gzipped topojson, build browse images, and upload to S3.

    Relies on module globals: `total`, `verbose`, `force`, `config`,
    `s3_bucket`, `s3_folder`.
    """
    # exponentially decaying weights (1/k^2 for day k); `sum` is their total
    # NOTE(review): `sum` shadows the builtin - rename if this block is ever
    # refactored.
    weights = [
        1.00000000, 0.25000000, 0.11111111, 0.06250000, 0.04000000,
        0.02777778, 0.02040816
    ]
    sum = 1.511797
    i = 0
    global total
    # accumulate the weighted rainfall; input data is in units of 10 * mm
    for f in files:
        if f.find(".1day.tif") > 0:
            fname = os.path.join(_dir, f)
            ds = gdal.Open(fname)
            band = ds.GetRasterBand(1)
            data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)
            if verbose:
                print "Process", i, weights[i], f, ds.RasterXSize, ds.RasterYSize
            # data in 10 * mm
            if i == 0:
                total = data
                total /= 10.0
            else:
                total += data * weights[i] / 10.0
            i += 1
            ds = None
    total /= sum

    # Get ARI files (antecedent rainfall index 90th/95th percentile rasters)
    ARI90 = os.path.join(config.data_dir, "ant_r", "ARI90.tif")
    ARI95 = os.path.join(config.data_dir, "ant_r", "ARI95.tif")
    ds_90 = gdal.Open(ARI90)
    band_90 = ds_90.GetRasterBand(1)
    ndata = band_90.GetNoDataValue()
    print ndata
    data_90 = band_90.ReadAsArray(0, 0, ds_90.RasterXSize,
                                  ds_90.RasterYSize).astype(numpy.float)
    ds_95 = gdal.Open(ARI95)
    band_95 = ds_95.GetRasterBand(1)
    data_95 = band_95.ReadAsArray(0, 0, ds_95.RasterXSize,
                                  ds_95.RasterYSize).astype(numpy.float)

    # classify rainfall into nowcast levels: 0 below ARI90, 2 above ARI95,
    # 1 between; finally zero out pixels where ARI90 is negative (nodata).
    # NOTE(review): the masking order matters - do not reorder these lines.
    total[total <= data_90] = 0
    total[total > data_95] = 2
    total[total > data_90] = 1
    total[data_90 < 0] = 0

    fname = os.path.join(_dir, "total.tif")
    if force or not os.path.exists(fname):
        save_tif(fname, total, ds_95, gdal.GDT_Byte, 1)
    dst_ds = None
    ds_90 = None
    ds_95 = None

    # Get susmap (global landslide susceptibility map)
    susmap = os.path.join(config.data_dir, "susmap.2", "global.tif")
    ds2 = gdal.Open(susmap)
    band_2 = ds2.GetRasterBand(1)
    data_2 = band_2.ReadAsArray(0, 0, ds2.RasterXSize, ds2.RasterYSize)

    # Supersample it to 1km (match the susceptibility map's grid)
    fname_1km = os.path.join(_dir, "total_1km.tif")
    if force or not os.path.exists(fname_1km):
        if verbose:
            print "supersampling to 1km..."
        #cmd = "gdal_translate -outsize 43167 21600 -co 'COMPRESS=DEFLATE' %s %s" % (fname, fname_1km)
        cmd = "gdalwarp -q -overwrite -co 'COMPRESS=DEFLATE' -ts %d %d %s %s" % (
            ds2.RasterXSize, ds2.RasterYSize, fname, fname_1km)
        # NOTE(review): this inner guard repeats the outer one - redundant.
        if force or not os.path.exists(fname_1km):
            execute(cmd)

    # Get susceptibility map and zero the nowcast where susceptibility < 2
    if verbose:
        print "checking against susceptibility map..."
    ds1 = gdal.Open(fname_1km)
    band_1 = ds1.GetRasterBand(1)
    data_1 = band_1.ReadAsArray(0, 0, ds1.RasterXSize, ds1.RasterYSize)
    data_1[data_2 < 2] = 0

    fname_1km_final = os.path.join(_dir,
                                   "global_landslide_nowcast_%s.tif" % ymd)
    if force or not os.path.exists(fname_1km_final):
        save_tif(fname_1km_final, data_1, ds2, gdal.GDT_Byte, 1)

    # nowcast classes and display colors
    levels = [2, 1]
    hexColors = ["#feb24c", "#f03b20"]

    geojsonDir = os.path.join(_dir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(_dir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)

    topojson_filename = os.path.join(
        _dir, "global_landslide_nowcast_%s.topojson" % ymd)
    merge_filename = os.path.join(
        geojsonDir, "global_landslide_nowcast_%s.geojson" % ymd)
    attr = "nowcast"

    if force or not os.path.exists(topojson_filename + ".gz"):
        # one thresholded tif + geojson per nowcast class
        for l in levels:
            fileName = os.path.join(levelsDir, "global_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds1, data_1, attr, force,
                        verbose)

        # merge the per-level geojson files into one FeatureCollection
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir,
                                    "%s_level_%d.geojson" % (attr, l))
            if os.path.exists(fileName):
                if verbose:
                    print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)

        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)

        # Convert to topojson
        cmd = "topojson -p -o " + topojson_filename + " " + merge_filename + " > /dev/null 2>&1"
        execute(cmd)
        cmd = "gzip -f --keep " + topojson_filename
        execute(cmd)

    osm_bg_image = os.path.join(_dir, "..", "osm_bg.png")
    sw_osm_image = os.path.join(_dir,
                                "global_landslide_nowcast_%s_thn.jpg" % ymd)
    browse_filename = os.path.join(geojsonDir, "global_browse_%s.tif" % ymd)
    subset_filename = os.path.join(geojsonDir,
                                   "global.%s_small_browse.tif" % (ymd))
    transparent = os.path.join(
        geojsonDir, "global.%s_small_browse_transparent.tif" % (ymd))

    # fetch a global OSM background once
    if not os.path.exists(osm_bg_image):
        ullat = 85
        ullon = -180
        lrlat = -85
        lrlon = 180
        print "wms", ullat, ullon, lrlat, lrlon
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)

    #if force or not os.path.exists(sw_osm_image):
    #    MakeBrowseImage(ds1, browse_filename, subset_filename, osm_bg_image, sw_osm_image,levels, hexColors, force, verbose, zoom)

    # Make a small browse image: low-res warp -> make black transparent ->
    # blend over the OSM background
    if force or not os.path.exists(browse_filename):
        cmd = "gdalwarp -q -te -180 -85 180 85 -tr 0.5 0.5 %s %s" % (
            fname_1km_final, browse_filename)
        execute(cmd)
    if force or not os.path.exists(transparent):
        cmd = "convert %s -transparent black %s" % (browse_filename,
                                                    transparent)
        execute(cmd)
    if force or not os.path.exists(sw_osm_image):
        cmd = "composite -quiet -gravity center -blend 60 %s %s %s" % (
            transparent, osm_bg_image, sw_osm_image)
        execute(cmd)

    ds1 = None
    ds2 = None
    file_list = [sw_osm_image, topojson_filename + ".gz", fname_1km_final]
    CopyToS3(s3_bucket, s3_folder, file_list, 1, 1)

    if not verbose:
        # Cleanup (including downloaded GPM half-hourly granules)
        gpm_files = os.path.join(_dir, "3B-HHR*")
        cmd = "rm -rf %s %s %s %s %s %s" % (gpm_files, fname_1km, fname,
                                            topojson_filename, geojsonDir,
                                            levelsDir)
        execute(cmd)
def process_file(mydir, filename, s3_bucket, s3_folder, bbox, regionName):
    """Convert a GEOS-5 saturation raster into web products for one region.

    Pipeline: upsample 10x with cubic resampling -> contour each level into
    geojson -> merge -> gzipped topojson -> browse/thumbnail images ->
    upload to S3, then clean intermediates unless `verbose`.

    Relies on module globals: `force`, `verbose`, `ymd`.
    """
    global force, verbose
    print "Processing", filename

    geojsonDir = os.path.join(mydir, "geojson")
    if not os.path.exists(geojsonDir):
        os.makedirs(geojsonDir)
    levelsDir = os.path.join(mydir, "levels")
    if not os.path.exists(levelsDir):
        os.makedirs(levelsDir)

    super_subset_file = os.path.join(mydir, "geos5_sat_super.%s.tif" % ymd)
    merge_filename = os.path.join(geojsonDir, "geos5_sat.%s.geojson" % ymd)
    topojson_filename = os.path.join(geojsonDir, "..",
                                     "geos5_sat.%s.topojson" % ymd)
    browse_filename = os.path.join(geojsonDir, "..",
                                   "geos5_sat.%s_browse.tif" % ymd)
    subset_filename = os.path.join(geojsonDir, "..",
                                   "geos5_sat.%s_small_browse.tif" % ymd)
    subset_aux_filename = os.path.join(
        geojsonDir, "..", "geos5_sat.%s_small_browse.tif.aux.xml" % ymd)
    osm_bg_image = os.path.join(mydir, "..", "osm_bg_%s.png" % regionName)
    sw_osm_image = os.path.join(geojsonDir, "..", "geos5_sat.%s_thn.jpg" % ymd)
    json_filename = os.path.join(geojsonDir, "geos5_sat.%s.json" % (ymd))

    # read pixel size and extent; xmax/ymax computed but only for debugging
    ds = gdal.Open(filename)
    geotransform = ds.GetGeoTransform()
    px = geotransform[1] / 10
    py = geotransform[5] / 10
    xorg = geotransform[0]
    yorg = geotransform[3]
    xmax = xorg + geotransform[1] * ds.RasterXSize
    ymax = yorg + geotransform[5] * ds.RasterYSize
    #print ymax, xorg, yorg, xmax
    ds = None

    # upsample and convolve
    if force or not os.path.exists(super_subset_file):
        # we need to have square pixels
        # NOTE(review): unlike the precip variant this passes py (negative
        # y resolution) as the second -tr argument - confirm intended.
        cmd = "gdalwarp -overwrite -q -r cubic -tr %s %s -co COMPRESS=DEFLATE %s %s" % (
            str(px), str(py), filename, super_subset_file)
        execute(cmd)

    # saturation contour levels and matching display colors
    # NOTE(review): colors lack the leading '#' used by the sibling
    # functions - verify MakeBrowseImage accepts both forms.
    levels = [5, 4, 3, 2]
    hexColors = ["ff9a00", "ff0000", "ff99cc", "cc00cc"]

    ds = gdal.Open(super_subset_file)
    band = ds.GetRasterBand(1)
    data = band.ReadAsArray(0, 0, ds.RasterXSize, ds.RasterYSize)

    if force or not os.path.exists(topojson_filename + ".gz"):
        # one thresholded tif + geojson per level
        for l in levels:
            fileName = os.path.join(levelsDir, ymd + "_level_%d.tif" % l)
            CreateLevel(l, geojsonDir, fileName, ds, data, "geos5_sat",
                        force, verbose)

        # merge the per-level geojson files into one FeatureCollection
        jsonDict = dict(type='FeatureCollection', features=[])
        for l in reversed(levels):
            fileName = os.path.join(geojsonDir,
                                    "geos5_sat_level_%d.geojson" % l)
            if os.path.exists(fileName):
                if verbose:
                    print "merge", fileName
                with open(fileName) as data_file:
                    data = json.load(data_file)
                if 'features' in data:
                    for f in data['features']:
                        jsonDict['features'].append(f)

        with open(merge_filename, 'w') as outfile:
            json.dump(jsonDict, outfile)

        # Convert to topojson (silence CLI output unless verbose)
        quiet = "> /dev/null 2>&1"
        if verbose:
            quiet = " "
        cmd = "topojson -p --bbox --simplify-proportion 0.1 -o " + topojson_filename + " " + merge_filename + quiet
        execute(cmd)
        # gzip without --keep: the .topojson itself is replaced by the .gz
        cmd = "gzip -f " + topojson_filename
        execute(cmd)

    # fetch an OSM background once per region, sized to the region bbox
    if not os.path.exists(osm_bg_image):
        ullat = bbox[3]
        ullon = bbox[0]
        lrlat = bbox[1]
        lrlon = bbox[2]
        wms(ullat, ullon, lrlat, lrlon, osm_bg_image)

    if force or not os.path.exists(sw_osm_image):
        zoom = 1
        scale = 1
        # colors reversed so they pair with ascending levels
        rColors = list(reversed(hexColors))
        MakeBrowseImage(ds, browse_filename, subset_filename, osm_bg_image,
                        sw_osm_image, levels, rColors, force, verbose, zoom,
                        scale)
    ds = None

    file_list = [sw_osm_image, topojson_filename + ".gz", filename]
    CopyToS3(s3_bucket, s3_folder, file_list, force, verbose)

    if not verbose:
        # Cleanup
        cmd = "rm -rf %s %s %s %s %s %s %s %s" % (
            merge_filename, browse_filename, topojson_filename,
            subset_filename, super_subset_file, subset_aux_filename,
            geojsonDir, levelsDir)
        execute(cmd)