def get_coops_gauge(stnid, bdate, edate, btime="0000", etime="2359",
                    output_dir='.', file_name=None, verbose=False):
    r"""Attempt to download a gauge data file from the CO-OPS website.

    Note: Only works for 5 days (?) of data or less.

    See http://tidesandcurrents.noaa.gov/1mindata.html for a list of stations

    :Input:
     - *stnid* - station id, e.g. 9419750 for Crescent City
     - *bdate*, *edate* - begin/end dates as "YYYYMMDD" strings
     - *btime*, *etime* - begin/end times as "HHMM" strings
     - *output_dir* - directory the file is written to
     - *file_name* - output file name; defaults to one built from the
       station id and date/time range
     - *verbose* - passed through to :func:`get_remote_file`

    :Example:

        stnid = 9419750  # Crescent City
        bdate = "20110311"
        edate = "20110316"
    """
    if file_name is None:
        file_name = "%s_%s_%s_to_%s_%s.csv" \
                    % (stnid, bdate, btime, edate, etime)

    url = "http://tidesandcurrents.noaa.gov/cgi-bin/tsunamicsv.cgi?&tmpname=&stnid=%s&bdate=%s&edate=%s&bdatetime=%s&edatetime=%s" \
          % (stnid, bdate, edate, btime, etime)

    get_remote_file(url, output_dir=output_dir, file_name=file_name,
                    verbose=verbose)

    # print() calls (rather than Python 2 print statements) so this module
    # also runs under Python 3.
    print("Attempted to download to ")
    print("    %s/%s" % (output_dir, file_name))
def fetch_dtopo(dtopo_file="sumatra_earthquake.tt3", force=False):
    """Fetch the 2004 Sumatra subfault solution and build a dtopo file.

    Does nothing when *dtopo_file* already exists, unless *force* is True.
    """
    # Guard clause: skip all work when the output is already in place.
    if os.path.exists(dtopo_file) and not force:
        return

    # Fetch data
    subfault_spec_file = "sumatra.subfault"
    url = ("http://neic.usgs.gov/neis/eq_depot/2004/eq_041226/"
           "result/static_out")
    util.get_remote_file(url, output_dir=os.getcwd(),
                         file_name=subfault_spec_file)

    # Convert to a dtopo file
    fault = CaltechFault()
    fault.read(subfault_spec_file, rupture_type='dynamic')
    x, y = fault.create_dtopo_xy()
    dtopo = fault.create_dtopography(x, y, verbose=True)
    dtopo.write(dtopo_file)
def get_topo(makeplots=False):
    """
    Retrieve the topo file from the GeoClaw repository.

    :Input:
     - *makeplots* - when True, also plot the topography and save it as
       a PNG next to the downloaded file.
    """
    from clawpack.geoclaw import topotools, util

    topo_fname = 'etopo10min120W60W60S0S.asc'
    url = 'http://www.geoclaw.org/topo/etopo/' + topo_fname
    util.get_remote_file(url, output_dir=scratch_dir, file_name=topo_fname,
                         verbose=True)

    if makeplots:
        # Import deferred so matplotlib is only required when plotting.
        from matplotlib import pyplot as plt
        topo = topotools.Topography(topo_fname, topo_type=2)
        topo.plot()
        fname = os.path.splitext(topo_fname)[0] + '.png'
        plt.savefig(fname)
        # print() call (not a Python 2 print statement) so this module
        # also runs under Python 3.
        print("Created ", fname)
def get_topo(makeplots=False):
    """
    Retrieve the topo file from the GeoClaw repository.

    :Input:
     - *makeplots* - when True, also plot the topography and save it as
       a PNG next to the downloaded file.
    """
    from clawpack.geoclaw import topotools, util

    topo_fname = 'etopo10min120W60W60S0S.asc'
    url = 'http://www.geoclaw.org/topo/etopo/' + topo_fname
    util.get_remote_file(url, output_dir=scratch_dir, file_name=topo_fname,
                         verbose=True)

    if makeplots:
        # Import deferred so matplotlib is only required when plotting.
        from matplotlib import pyplot as plt
        topo = topotools.Topography(topo_fname, topo_type=2)
        topo.plot()
        fname = os.path.splitext(topo_fname)[0] + '.png'
        plt.savefig(fname)
        # print() call (not a Python 2 print statement) so this module
        # also runs under Python 3.
        print("Created ", fname)
def test_get_remote_file():
    """Test the ability to fetch a remote file from the web."""
    temp_path = tempfile.mkdtemp()
    try:
        url = ('https://raw.githubusercontent.com/rjleveque/geoclaw/'
               '5f675256c043e59e5065f9f3b5bdd41c2901702c/src/python/'
               'geoclaw/tests/kahului_sample_1s.tt2')
        util.get_remote_file(url, output_dir=temp_path, force=True)

        # Compare the freshly-downloaded topography against the checked-in
        # reference copy of the same file.
        file_name = os.path.basename(url)
        local_path = os.path.join(temp_path, file_name)
        download_topo = topotools.Topography(path=local_path)

        test_path = os.path.join(testdir, "data", file_name)
        test_topo = topotools.Topography(path=test_path)

        assert numpy.allclose(download_topo.Z, test_topo.Z), \
            "Downloaded file does not match %s" % test_path
    except AssertionError as e:
        # Keep a copy of the mismatched download for post-mortem inspection.
        shutil.copy(local_path, os.path.join(os.getcwd(), "remote_file.tt2"))
        raise e
    finally:
        shutil.rmtree(temp_path)
#!/usr/bin/env python
"""Simple implementation of a file fetcher"""

import sys
import os

import clawpack.geoclaw.util as util

if __name__ == "__main__":
    # Base URL for the bathymetry files; a replacement may be given as the
    # first command-line argument.
    base_url = sys.argv[1] if len(sys.argv) > 1 \
                           else "http://www.columbia.edu/~ktm2132/bathy/"

    urls = [os.path.join(base_url, 'gulf_caribbean.tt3.tar.bz2')]

    for url in urls:
        util.get_remote_file(url, output_dir='.', verbose=True)
#!/usr/bin/env python
"""Fetch relevant bathymetry for the Atlantic examples"""

import sys
import os

import clawpack.geoclaw.util as util

if __name__ == "__main__":
    # Download location defaults to the current directory; a replacement may
    # be given as the first command-line argument.
    output_dir = sys.argv[1] if len(sys.argv) > 1 else os.getcwd()

    urls = ["https://dl.dropboxusercontent.com/u/8449354/bathy/atlantic_2min.tt3.tar.bz2",
            "https://dl.dropboxusercontent.com/u/8449354/bathy/newyork_3s.tt3.tar.bz2"]

    for url in urls:
        util.get_remote_file(url, output_dir=output_dir)
def fetch_topo():
    """Download the topography file for this example into the current directory.

    NOTE(review): the URL is an empty placeholder -- as written this call
    will fail; fill in the real topography URL before use.
    """
    # Fetch data
    url = ""
    util.get_remote_file(url, output_dir=os.getcwd())
#!/usr/bin/env python
"""Simple implementation of a file fetcher"""

import sys
import os

import clawpack.geoclaw.util as util

if __name__ == "__main__":
    # Base URL for the bathymetry files; a replacement may be given as the
    # first command-line argument.
    base_url = sys.argv[1] if len(sys.argv) > 1 \
                           else "http://www.columbia.edu/~ktm2132/bathy/"

    urls = [os.path.join(base_url, 'gulf_caribbean.tt3.tar.bz2')]

    for url in urls:
        util.get_remote_file(url)