def get_topo(output_dir=None, force=False, plot=False, verbose=False):
    """Retrieve ETOPO1 data from NOAA"""
    # Fall back to the current working directory when no target is given.
    target_dir = output_dir if output_dir is not None else os.getcwd()

    # Southern edge of each 15-degree latitude strip to download.
    lower_bounds = [-45.0, -30.0, -15.0, 0.0, 15.0, 30.0]

    for index, south_edge in enumerate(lower_bounds):
        strip_name = "strip%s.nc" % index
        strip_url = form_etopo_URL([-180, south_edge],
                                   [180, south_edge + 15.0],
                                   file_name=strip_name)
        strip_path = os.path.join(target_dir, strip_name)

        if not force and os.path.exists(strip_path):
            print("Skipping download... file already exists: ", strip_path)
        else:
            # TODO: Check output for errors
            data.get_remote_file(strip_url, output_dir=target_dir,
                                 file_name=strip_name, verbose=verbose,
                                 force=force)

    if not plot:
        return

    # Plot every downloaded strip (coarsely subsampled) on one set of axes.
    fig = plt.figure()
    axes = fig.add_subplot(1, 1, 1)
    for index in range(len(lower_bounds)):
        strip = topotools.Topography(
            path=os.path.join(target_dir, "strip%s.nc" % index))
        strip.read(stride=[100, 100])
        strip.plot(axes=axes)
    plt.show()
def __download_topo__():
    """Retrieve necessary topography files from the clawpack site.

    Downloads an ETOPO1 extract and the Grays Harbor 1/3 arc-second DEM
    into ``DATA_DIR/topo``, then writes a sign-inverted copy of the
    ETOPO1 grid as ``etopo1.tt3`` (topo_type 3).
    """
    from clawpack.geoclaw import topotools
    from clawpack.clawutil.data import get_remote_file

    topo_dir = os.path.join(DATA_DIR, 'topo')
    # Bug fix: Python 2 print statement replaced with the print() function
    # (the original line is a SyntaxError under Python 3).
    print('Downloading topo data to {}'.format(topo_dir))

    baseurl = 'http://depts.washington.edu/clawpack/geoclaw/topo/'
    etopo_fname = 'etopo1-230250035050.asc'
    etopo_url = baseurl + 'etopo/' + etopo_fname
    grays_harbor_fname = 'N_GraysHarbor_1_3sec.tt3'
    grays_harbor_url = baseurl + 'WA/' + grays_harbor_fname

    # Retrieve files
    get_remote_file(grays_harbor_url, output_dir=topo_dir,
                    file_name=grays_harbor_fname)
    get_remote_file(etopo_url, output_dir=topo_dir, file_name=etopo_fname)

    # Invert the etopo file (flip the sign convention of Z).
    etopo = topotools.Topography(os.path.join(topo_dir, etopo_fname))
    etopo.Z = -etopo.Z
    etopo.write(os.path.join(topo_dir, 'etopo1.tt3'), topo_type=3)
def get_topo(plot=False):
    """Retrieve the topo file from the GeoClaw repository.

    :param plot: if True, also plot each topo file and save a PNG.
    """
    # Fetch topography.  Bug fix: URLs are joined with plain string
    # concatenation -- os.path.join would insert a backslash on Windows
    # and corrupt the URL.
    base_url = "https://dl.dropboxusercontent.com/u/8449354/bathy/"
    urls = [base_url + "gulf_caribbean.tt3.tar.bz2",
            base_url + "NOAA_Galveston_Houston.tt3.tar.bz2",
            base_url + "galveston_tx.asc.tar.bz2"]
    for url in urls:
        data.get_remote_file(url, verbose=True)

    # Plot if requested
    if plot:
        import matplotlib.pyplot as plt
        scratch_dir = os.path.join(os.environ.get("CLAW", os.getcwd()),
                                   'geoclaw', 'scratch')
        for topo_name in ['gulf_caribbean.tt3',
                          'NOAA_Galveston_Houston.tt3',
                          'galveston_tx.asc']:
            topo_path = os.path.join(scratch_dir, topo_name)
            topo = topotools.Topography(topo_path, topo_type=3)
            topo.plot()
            fname = os.path.splitext(topo_name)[0] + '.png'
            plt.savefig(fname)
def __download_dtopo__():
    """Retrieve necessary earthquake source files from the clawpack site.

    Downloads the CSZ L1 deformation file into ``DATA_DIR/dtopo``.
    """
    from clawpack.clawutil.data import get_remote_file

    dtopo_fname = 'CSZ_L1.tt3'
    url = 'http://www.geoclaw.org/dtopo/CSZ/' + dtopo_fname
    dtopo_dir = os.path.join(DATA_DIR, 'dtopo')
    # Bug fix: Python 2 print statement replaced with the print() function
    # (the original line is a SyntaxError under Python 3).
    print('Downloading dtopo data to {}'.format(dtopo_dir))

    # Retrieve file
    get_remote_file(url, output_dir=dtopo_dir, file_name=dtopo_fname)
def get_topo():
    """Retrieve the topo files from NOAA."""
    base_url = 'https://gis.ngdc.noaa.gov/mapviewer-support/wcs-proxy/wcs.groovy'
    scratch_dir = os.path.join(os.environ['CLAW'], 'geoclaw', 'scratch')

    def hemisphere(value, positive, negative):
        # Hemisphere letter for a signed coordinate.
        return positive if value >= 0 else negative

    # (res_mins, lat_min, lat_max, lon_min, lon_max)
    topo_params = [(10, 5, 35, -100, -70),      # Gulf
                   (1, 28, 31, -92.5, -87.5)]   # New Orleans

    topo_files = []
    for res_mins, lat_min, lat_max, lon_min, lon_max in topo_params:
        # Build the output file name from the bounding box.
        lat_lon = '{}{}_{}{}_{}{}_{}{}'.format(
            abs(lat_min), hemisphere(lat_min, 'N', 'S'),
            abs(lat_max), hemisphere(lat_max, 'N', 'S'),
            abs(lon_min), hemisphere(lon_min, 'E', 'W'),
            abs(lon_max), hemisphere(lon_max, 'E', 'W'))
        topo_fname = 'etopo1_{}m_{}.nc'.format(res_mins, lat_lon)
        topo_files.append(os.path.join(scratch_dir, topo_fname))

        # The query string is assembled by hand because 'urlencode'
        # causes an internal server error on this endpoint.
        res_hrs = res_mins / 60.0
        url_params = {'filename': 'etopo1.nc',
                      'request': 'getcoverage',
                      'version': '1.0.0',
                      'service': 'wcs',
                      'coverage': 'etopo1',
                      'CRS': 'EPSG:4326',
                      'format': 'netcdf',
                      'resx': '{:.18f}'.format(res_hrs),
                      'resy': '{:.18f}'.format(res_hrs),
                      'bbox': '{:.14f},{:.14f},{:.14f},{:.14f}'.format(
                          lon_min, lat_min, lon_max, lat_max)}
        query_str = '&'.join('{}={}'.format(key, value)
                             for key, value in url_params.items())
        data.get_remote_file('{}?{}'.format(base_url, query_str),
                             file_name=topo_fname, verbose=True)

    return topo_files
def get_topo(makeplots=False):
    """Retrieve the topo file from the GeoClaw repository.

    :param makeplots: if True, also plot the topography and save a PNG.
    """
    from clawpack.geoclaw import topotools
    import clawpack.clawutil.data as data

    topo_fname = 'etopo10min120W60W60S0S.asc'
    url = 'http://www.geoclaw.org/topo/etopo/' + topo_fname
    data.get_remote_file(url, output_dir=scratch_dir, file_name=topo_fname,
                         verbose=True)

    if makeplots:
        from matplotlib import pyplot as plt
        topo = topotools.Topography(os.path.join(scratch_dir, topo_fname),
                                    topo_type=2)
        topo.plot()
        fname = os.path.splitext(topo_fname)[0] + '.png'
        plt.savefig(fname)
        # Bug fix: Python 2 print statement replaced with the print()
        # function (the original line is a SyntaxError under Python 3).
        print("Created ", fname)
def get_topo(makeplots=False):
    """Retrieve the topo file from the GeoClaw repository.

    :param makeplots: if True, also plot the topography and save a PNG.
    """
    from clawpack.geoclaw import topotools
    import clawpack.clawutil.data as data

    topo_fname = 'etopo10min120W60W60S0S.asc'
    url = 'http://www.geoclaw.org/topo/etopo/' + topo_fname
    data.get_remote_file(url, output_dir=scratch_dir, file_name=topo_fname,
                         verbose=True)

    if makeplots:
        from matplotlib import pyplot as plt
        topo = topotools.Topography(os.path.join(scratch_dir, topo_fname),
                                    topo_type=2)
        topo.plot()
        fname = os.path.splitext(topo_fname)[0] + '.png'
        plt.savefig(fname)
        # Bug fix: Python 2 print statement replaced with the print()
        # function (the original line is a SyntaxError under Python 3).
        print("Created ", fname)
def get_topo(plot=False):
    """Retrieve the topo file from the GeoClaw repository.

    :param plot: if True, also plot each topo file and save a PNG.
    """
    # Fetch topography.  Bug fix: URLs are joined with plain string
    # concatenation -- os.path.join would insert a backslash on Windows
    # and corrupt the URL.
    base_url = "https://dl.dropboxusercontent.com/u/8449354/bathy/"
    urls = [base_url + "gulf_caribbean.tt3.tar.bz2",
            base_url + "NOAA_Galveston_Houston.tt3.tar.bz2",
            base_url + "galveston_tx.asc.tar.bz2"]
    for url in urls:
        data.get_remote_file(url, verbose=True)

    # Plot if requested
    if plot:
        import matplotlib.pyplot as plt
        scratch_dir = os.path.join(os.environ.get("CLAW", os.getcwd()),
                                   'geoclaw', 'scratch')
        for topo_name in ['gulf_caribbean.tt3',
                          'NOAA_Galveston_Houston.tt3',
                          'galveston_tx.asc']:
            topo_path = os.path.join(scratch_dir, topo_name)
            topo = topotools.Topography(topo_path, topo_type=3)
            topo.plot()
            fname = os.path.splitext(topo_name)[0] + '.png'
            plt.savefig(fname)
from clawpack.geoclaw import topotools, dtopotools
from clawpack.clawutil.data import get_remote_file

# Output directory for figures, created up front.
subdir = 'figures'
# Portability fix: use os.makedirs instead of shelling out to 'mkdir -p'
# (os.system with a shell string is non-portable and unchecked).
os.makedirs(subdir, exist_ok=True)


def savefigp(fname):
    """Save the current figure into `subdir` and report the path."""
    fname = os.path.join(subdir, fname)
    savefig(fname, bbox_inches='tight')
    # Bug fix: Python 2 print statement replaced with the print() function
    # (the original line is a SyntaxError under Python 3).
    print("Created ", fname)


# Fetch the shoreline data (a numpy .npy file; loaded with pylab's load).
filename = 'pacific_shorelines_east_4min.npy'
url = 'http://www.geoclaw.org/topo/' + filename
get_remote_file(url=url, output_dir='.', force=True, verbose=True)
shore = load(filename)

# Fault geometry: column layout of chile2010_usgs.txt, plus subfault
# parameters not present in the file.
fault_geometry_file = 'chile2010_usgs.txt'
column_map = {"latitude": 0, "longitude": 1, "depth": 2, "slip": 3,
              "rake": 4, "strike": 5, "dip": 6}
defaults = {'length': 30, 'width': 20}
coordinate_specification = 'top center'
input_units = {'slip': 'cm', 'depth': 'km', 'length': 'km', 'width': 'km'}
def etopo1_download(xlimits, ylimits, dx=0.0166666666667, dy=None, \
        output_dir='.', file_name=None, force=False, verbose=True, \
        return_topo=False):
    """
    Create a url to download etopo1 topography from NCEI and save as a
    topo_type 3 file.  Uses the database described at
        http://www.ngdc.noaa.gov/mgg/global/global.html

    :Inputs:
    - *xlimits*: tuple (x1, x2) limits in longitude
        Must either have -180 <= x1 < x2 <= 180
        or 180 <= x1 < x2 <= 360 or -360 <= x1 < x2 <= -180
        To download topo for a region spanning longitude 180, you must
        download two separate files, one on each side.
    - *ylimits*: tuple (y1, y2) limits in latitude
        Must have -90 <= y1 < y2 <= 90.
    - *dx*: resolution in x, default is 1./60. degree = 1 arcminute.
    - *dy*: resolution in y, default is dy = dx.
    - *output_dir*: directory to store file, default is '.'
    - *file_name*: name of file, default is constructed from xlimits,ylimits
    - *force*: if True, download even if the file already exists.
    - *verbose*: if True, print info from
      clawpack.clawutil.data.get_remote_file
    - *return_topo*: if True, read the file back and return a Topography.

    Note: New NGDC format gives cell-registered values, so shift the values
    `xllcorner` and `yllcorner` to the specified corner.

    **To do:** Check whether it is possible to specify grid-registered values
    as implied at http://www.ngdc.noaa.gov/mgg/global/global.html

    The `nodata_value` line expected by GeoClaw is now missing, so add this
    in too.
    """
    from clawpack.geoclaw import util, topotools
    from clawpack.clawutil.data import get_remote_file
    import os
    from numpy import round

    format = '&format=aaigrid'   # ASCII grid output == topo_type 3

    if dy is None:
        dy = dx

    arcminute = 1 / 60.
    # The server is only known to subsample correctly at 1 arcminute.
    if abs(dx - arcminute) > 1e-8 or abs(dy - arcminute) > 1e-8:
        print('*** Warning: data may not be properly subsampled at')
        print('*** resolutions other than 1 arcminute, dx=dy=1/60.')

    x1, x2 = xlimits
    y1, y2 = ylimits

    if file_name is None:
        # Default name encodes the rounded bounding box and resolution.
        file_name = 'etopo1_%i_%i_%i_%i_%imin.asc' \
            % (int(round(x1)),int(round(x2)),int(round(y1)),int(round(y2)),\
            int(round(60*dx)))

    # Shift longitudes into [-180, 180] for the server request.
    if (x1 >= 180) and (x1 < x2) and (x2 <= 360):
        longitude_shift = -360.
    elif (x1 >= -360) and (x1 < x2) and (x2 <= -180):
        longitude_shift = 360.
    else:
        longitude_shift = 0.

    x1 = x1 + longitude_shift
    x2 = x2 + longitude_shift

    if (x1 < -180) or (x1 >= x2) or (x2 > 180):
        raise ValueError(
            "Require -180 <= x1 < x2 <= 180 or 180 <= x1 < x2 <=360")
    if (y1 < -90) or (y1 >= y2) or (y2 > 90):
        raise ValueError("Require -90 <= y1 < y2 <= 90")

    bbox = '&bbox=%1.4f,%1.4f,%1.4f,%1.4f' % (x1, y1, x2, y2)
    res = '&resx=%1.12f&resy=%1.12f' % (dx, dy)

    url = 'http://maps.ngdc.noaa.gov/mapviewer-support/wcs-proxy/wcs.groovy' \
        + '?request=getcoverage&version=1.0.0&service=wcs' \
        + '&coverage=etopo1&CRS=EPSG:4326' \
        + format + bbox + res

    file_path = os.path.join(output_dir, file_name)
    if os.path.exists(file_path) and (not force):
        print("Skipping download... file already exists: ", file_path)
    else:
        get_remote_file(url, output_dir=output_dir, file_name=file_name, \
            verbose=verbose,force=force)

    x1 = x1 - longitude_shift   # shift back before writing header
    # NOTE(review): x1 is not actually used below -- the header is
    # rewritten with the values read from the downloaded file itself.

    # Patch the ASCII-grid header in place (runs even when the download
    # was skipped, so it must be idempotent -- see the nodata_value guard).
    lines = open(file_path).readlines()
    if lines[2].split()[0] != 'xllcorner':
        print("*** Error downloading, check the file!")
    else:
        # NOTE(review): x2file is read from the 'yllcorner' header line,
        # so despite its name it holds the y corner value, not x2.
        x1file = float(lines[2].split()[1])
        x2file = float(lines[3].split()[1])
        lines[2] = 'xllcorner %1.12f\n' % x1file
        lines[3] = 'yllcorner %1.12f\n' % x2file
        # GeoClaw expects a nodata_value header line; add it if absent.
        if 'nodata_value' not in lines[5]:
            lines = lines[:5] + ['nodata_value -99999\n'] + lines[5:]
            print("Added nodata_value line")
        f = open(file_path, 'w')
        f.writelines(lines)
        f.close()
        print("Created file: ", file_path)

    if return_topo:
        topo = topotools.Topography()
        topo.read(file_path, topo_type=3)
        return topo
# Download driver: storm names are taken from the command line and mapped
# to topo URLs; the special name "global_strip" expands to six ETOPO1
# latitude strips.
force = False
verbose = False

# Override download location
if len(sys.argv) > 1:
    storm_names = sys.argv[1:]
else:
    # TODO: Add better failure
    sys.exit()

# Construct list of downloads.
# Bug fixes: `urls` was previously used before it was defined, the check
# compared an undefined variable `name`, and list.pop() was called with a
# value instead of an index (TypeError) -- use membership test + remove().
urls = []
if "global_strip" in storm_names:
    strips = [-45.0, -30.0, -15.0, 0.0, 15.0, 30.0]
    for (i, lower_bound) in enumerate(strips):
        file_name = "strip%s.nc" % i
        # TODO specify file name
        urls.append(
            form_etopo_URL([-180, lower_bound], [180, lower_bound + 15.0]))
    storm_names.remove("global_strip")

for name in storm_names:
    urls.append(topo_urls[name])

for url in urls:
    data.get_remote_file(url, output_dir=output_dir, verbose=verbose,
                         force=force)
def get_topo():
    """Retrieve the topo files from NOAA.

    Downloads two ETOPO1 NetCDF extracts (Gulf of Mexico at 10 arcmin,
    New Orleans region at 1 arcmin) into $CLAW/geoclaw/scratch via the
    NOAA WCS proxy and returns the list of local file paths.

    Raises KeyError if the CLAW environment variable is not set.
    """
    base_url = 'https://gis.ngdc.noaa.gov/mapviewer-support/wcs-proxy/wcs.groovy'
    claw_dir = os.environ['CLAW']
    scratch_dir = os.path.join(claw_dir, 'geoclaw', 'scratch')

    # Specify topo parameters as tuples of the form
    #     (res_mins, lat_min, lat_max, lon_min, lon_max)
    topo_params = [(10, 5, 35, -100, -70),      # Gulf
                   (1, 28, 31, -92.5, -87.5)]   # New Orleans

    topo_files = []
    for (res_mins, lat_min, lat_max, lon_min, lon_max) in topo_params:
        # Construct output file name, e.g. etopo1_10m_5N_35N_100W_70W.nc
        lat_lon = '{}{}_{}{}_{}{}_{}{}'.format(
            abs(lat_min), 'N' if lat_min >= 0 else 'S',
            abs(lat_max), 'N' if lat_max >= 0 else 'S',
            abs(lon_min), 'E' if lon_min >= 0 else 'W',
            abs(lon_max), 'E' if lon_max >= 0 else 'W')
        topo_fname = 'etopo1_{}m_{}.nc'.format(res_mins, lat_lon)
        topo_files.append(os.path.join(scratch_dir, topo_fname))

        # Fetch topography
        # Note: We manually create the query string because using 'urlencode'
        #       causes an internal server error.
        res_hrs = res_mins / 60.0   # arcminutes -> degrees
        url_params = {'filename': 'etopo1.nc',
                      'request': 'getcoverage',
                      'version': '1.0.0',
                      'service': 'wcs',
                      'coverage': 'etopo1',
                      'CRS': 'EPSG:4326',
                      'format': 'netcdf',
                      'resx': '{:.18f}'.format(res_hrs),
                      'resy': '{:.18f}'.format(res_hrs),
                      'bbox': '{:.14f},{:.14f},{:.14f},{:.14f}'.format(
                          lon_min, lat_min, lon_max, lat_max)}
        query_str = '&'.join('{}={}'.format(k, v)
                             for k, v in url_params.items())
        full_url = '{}?{}'.format(base_url, query_str)
        data.get_remote_file(full_url, file_name=topo_fname, verbose=True)

    return topo_files
/Users/rjl/git/GeoClaw_MOST_comparisons/topo/PacificDEMs/*4min.tt3 """ from __future__ import print_function from pylab import * from clawpack.geoclaw import topotools from clawpack.clawutil.data import get_remote_file import os # Fetch earthquake source model (dtopo file): url_geoclaw = 'http://depts.washington.edu/clawpack/geoclaw/' url = os.path.join(url_geoclaw, 'dtopo/tohoku/2011Tohoku_deformation.asc') get_remote_file(url, output_dir='.', verbose=True) # Read etopo1 topography data from NCEI thredds server: # Portion east of -180 (date line): etopo1_url = 'https://www.ngdc.noaa.gov/thredds/dodsC/global/ETOPO1_Ice_g_gmt4.nc' extent = [-180, -110, 20, 60] print('Attempting to read etopo1 data from\n %s' % etopo1_url) etopo = topotools.read_netcdf(path=etopo1_url, extent=extent, coarsen=1, verbose=True) fname = 'etopo1_-180_-110_20_60_1min.asc' etopo.write(fname, topo_type=3)
def etopo1_download(xlimits, ylimits, dx=0.0166666666667, dy=None,
                    output_dir='.', file_name=None, force=False, verbose=True,
                    return_topo=False):
    """
    Create a url to download etopo1 topography from NCEI and save as a
    topo_type 3 file.  Uses the database described at
        http://www.ngdc.noaa.gov/mgg/global/global.html

    :Inputs:
    - *xlimits*: tuple (x1, x2) limits in longitude
        Must either have -180 <= x1 < x2 <= 180
        or 180 <= x1 < x2 <= 360 or -360 <= x1 < x2 <= -180
        To download topo for a region spanning longitude 180, you must
        download two separate files, one on each side.
    - *ylimits*: tuple (y1, y2) limits in latitude
        Must have -90 <= y1 < y2 <= 90.
    - *dx*: resolution in x, default is 1./60. degree = 1 arcminute.
    - *dy*: resolution in y, default is dy = dx.
    - *output_dir*: directory to store file, default is '.'
    - *file_name*: name of file, default is constructed from xlimits,ylimits
    - *force*: if True, download even if the file already exists.
    - *verbose*: if True, print info from
      clawpack.clawutil.data.get_remote_file
    - *return_topo*: if True, read the file back and return a Topography.

    Note: New NGDC format gives cell-registered values, so shift the values
    `xllcorner` and `yllcorner` to the specified corner.

    **To do:** Check whether it is possible to specify grid-registered values
    as implied at http://www.ngdc.noaa.gov/mgg/global/global.html

    The `nodata_value` line expected by GeoClaw is now missing, so add this
    in too.
    """
    from clawpack.geoclaw import util, topotools
    from clawpack.clawutil.data import get_remote_file
    import os
    from numpy import round

    format = '&format=aaigrid'   # ASCII grid output == topo_type 3

    if dy is None:
        dy = dx

    x1, x2 = xlimits
    y1, y2 = ylimits

    if file_name is None:
        # Default name encodes the rounded bounding box and resolution.
        file_name = 'etopo1_%i_%i_%i_%i_%imin.tt3' \
            % (int(round(x1)), int(round(x2)), int(round(y1)),
               int(round(y2)), int(round(60 * dx)))

    # Shift longitudes into [-180, 180] for the server request.
    if (x1 >= 180) and (x1 < x2) and (x2 <= 360):
        longitude_shift = -360.
    elif (x1 >= -360) and (x1 < x2) and (x2 <= -180):
        longitude_shift = 360.
    else:
        longitude_shift = 0.

    x1 = x1 + longitude_shift
    x2 = x2 + longitude_shift

    if (x1 < -180) or (x1 >= x2) or (x2 > 180):
        raise ValueError(
            "Require -180 <= x1 < x2 <= 180 or 180 <= x1 < x2 <=360")
    if (y1 < -90) or (y1 >= y2) or (y2 > 90):
        raise ValueError("Require -90 <= y1 < y2 <= 90")

    bbox = '&bbox=%1.4f,%1.4f,%1.4f,%1.4f' % (x1, y1, x2, y2)
    res = '&resx=%1.12f&resy=%1.12f' % (dx, dy)

    url = 'http://maps.ngdc.noaa.gov/mapviewer-support/wcs-proxy/wcs.groovy' \
        + '?request=getcoverage&version=1.0.0&service=wcs' \
        + '&coverage=etopo1&CRS=EPSG:4326' \
        + format + bbox + res

    file_path = os.path.join(output_dir, file_name)
    if os.path.exists(file_path) and (not force):
        # Bug fix: Python 2 print statements converted to print() calls
        # throughout this function (SyntaxError under Python 3).
        print("Skipping download... file already exists: ", file_path)
    else:
        get_remote_file(url, output_dir=output_dir, file_name=file_name,
                        verbose=verbose, force=force)

    x1 = x1 - longitude_shift   # shift back before writing header

    # Patch the ASCII-grid header in place (runs even when the download
    # was skipped).
    lines = open(file_path).readlines()
    if lines[2].split()[0] != 'xllcorner':
        print("*** Error downloading, check the file!")
    else:
        lines[2] = 'xllcorner %1.12f\n' % x1
        lines[3] = 'yllcorner %1.12f\n' % y1
        # Bug fix: only insert the nodata_value line if it is not already
        # present, otherwise re-running on an existing file inserts a
        # duplicate header line each time.
        if 'nodata_value' not in lines[5]:
            lines = lines[:5] + ['nodata_value -99999\n'] + lines[5:]
        f = open(file_path, 'w')
        f.writelines(lines)
        f.close()
        print("Shifted xllcorner and yllcorner to cell centers")
        print("   and added nodata_value line")
        print("Created file: ", file_path)

    if return_topo:
        topo = topotools.Topography()
        topo.read(file_path, topo_type=3)
        return topo
#!/usr/bin/env python
"""Fetch relevant bathymetry for the Atlantic examples"""

import sys
import os

import clawpack.clawutil.data as data

if __name__ == "__main__":
    # Allow the download directory to be overridden on the command line;
    # otherwise fall back to the current working directory.
    output_dir = sys.argv[1] if len(sys.argv) > 1 else os.getcwd()

    urls = ["https://www.dropbox.com/s/jkww7jm78azswk5/atlantic_1min.tt3.tar.bz2?dl=0",
            "https://www.dropbox.com/s/vafi7k6zqn5cfs1/newyork_3s.tt3.tar.bz2?dl=0"]
    for url in urls:
        data.get_remote_file(url, output_dir=output_dir)