def test_get_all_swwfiles(self):
    """An invalid (non-sww) filename must make get_all_swwfiles raise IOError."""
    try:
        get_all_swwfiles('', 'test.txt')    # invalid: not an sww basename
    except IOError:
        pass
    else:
        raise Exception('Should have raised exception')
def test_get_all_swwfiles(self):
    """get_all_swwfiles must raise IOError for a filename that is not an sww file."""
    raised_io_error = False
    try:
        get_all_swwfiles('', 'test.txt')    # 'test.txt' is invalid here
    except IOError:
        raised_io_error = True
    if not raised_io_error:
        raise Exception('Should have raised exception')
def test_get_all_swwfiles1(self):
    """get_all_swwfiles must return the basenames of every matching .sww file."""
    temp_dir = tempfile.mkdtemp('', 'sww_test')

    # Create four 'test*.sww' files in the temporary directory
    filenames = [tempfile.mktemp('.sww', 'test', temp_dir) for _ in range(4)]
    for fname in filenames:
        fid = open(fname, 'w')
        fid.write('hello')
        fid.close()

    look_in_dir, _ = os.path.split(filenames[0])
    iterate = get_all_swwfiles(look_in_dir, 'test')

    del_dir(temp_dir)

    # Every created file (minus its '.sww' extension) must be reported
    for fname in filenames:
        _, base = os.path.split(fname)
        assert base[:-4] in iterate
    assert len(iterate) == 4
def test_get_all_swwfiles1(self):
    """All four test*.sww files in a temp dir must be found by get_all_swwfiles."""
    work_dir = tempfile.mkdtemp('', 'sww_test')

    paths = []
    for _ in range(4):
        # mktemp only generates a name; open() actually creates the file
        path = tempfile.mktemp('.sww', 'test', work_dir)
        handle = open(path, 'w')
        handle.write('hello')
        handle.close()
        paths.append(path)

    search_dir = os.path.split(paths[0])[0]
    found = get_all_swwfiles(search_dir, 'test')

    del_dir(work_dir)

    for path in paths:
        base = os.path.split(path)[1]
        assert base[:-4] in found       # strip the '.sww' extension
    assert len(found) == 4
def sww2dem_batch(basename_in, extra_name_out=None,
                  quantities=None,                  # defaults to elevation
                  reduction=None,
                  cellsize=10,
                  number_of_decimal_places=None,
                  NODATA_value=-9999,
                  easting_min=None,
                  easting_max=None,
                  northing_min=None,
                  northing_max=None,
                  verbose=False,
                  origin=None,
                  datum='WGS84',
                  format='ers'):
    """Wrapper for sww2dem.
    See sww2dem to find out what most of the parameters do.

    Note that since this is a batch command, the normal filename naming
    conventions do not apply.

    basename_in is a path to sww file/s, without the .sww extension.
    extra_name_out is a postfix to add to the output filename.
    quantities is a list of quantities.  Each quantity will be
    calculated for each sww file.

    This returns the basenames of the files produced, which is made up
    of the dir and all of the file name, except the extension.

    This function returns the names of the files produced.

    It will also produce as many output files as there are input sww files.
    """

    if quantities is None:
        quantities = ['elevation']

    # Allow a single quantity to be passed as a bare string
    # (isinstance is the idiomatic check and also accepts str subclasses,
    # unlike the previous 'type(quantities) is str')
    if isinstance(quantities, str):
        quantities = [quantities]

    # How many sww files are there?
    sww_dir, base = os.path.split(basename_in)
    iterate_over = get_all_swwfiles(sww_dir, base, verbose)

    if sww_dir == "":
        sww_dir = "."                               # Unix compatibility

    files_out = []
    for sww_file in iterate_over:
        for quantity in quantities:
            if extra_name_out is None:
                basename_out = sww_file + '_' + quantity
            else:
                basename_out = (sww_file + '_' + quantity
                                + '_' + extra_name_out)

            # os.path.join avoids doubled separators from manual
            # 'dir + os.sep + name' concatenation
            swwin = os.path.join(sww_dir, sww_file + '.sww')
            demout = os.path.join(sww_dir, basename_out + '.' + format)

            if verbose:
                log.critical('sww2dem: %s => %s' % (swwin, demout))

            file_out = sww2dem(swwin, demout, quantity, reduction,
                               cellsize, number_of_decimal_places,
                               NODATA_value, easting_min, easting_max,
                               northing_min, northing_max, verbose,
                               origin, datum)
            files_out.append(file_out)

    return files_out
def sww2csv_gauges(sww_file,
                   gauge_file,
                   out_name='gauge_',
                   quantities=['stage', 'depth', 'elevation',
                               'xmomentum', 'ymomentum'],
                   verbose=False,
                   use_cache=True,
                   output_centroids=False):
    """Interpolate quantities from an sww file at gauge locations and write
    one csv time-series file per gauge.

    Inputs:
        sww_file: path to any sww file

        gauge_file: csv file assumed to follow this format
            name, easting, northing, elevation
            point1, 100.3, 50.2, 10.0
            point2, 10.3, 70.3, 78.0

        NOTE: the column order can change, but the names in the header,
        e.g. 'easting', 'elevation', must be the same -- ALL lower case!

        out_name: prefix for the output file names (default is 'gauge_')

        quantities: list of quantity names to export.
        NOTE: if using csv2timeseries_graphs after creating the csv files,
        it is essential to export the quantities 'depth' and 'elevation':
        'depth' is good to analyse gauges on land, and 'elevation' is used
        automatically by csv2timeseries_graphs in the legend.

    Outputs:
        One file for each gauge/point location in the points file, written
        to the same directory as 'sww_file' and named
            <out_name><name>.csv
        e.g. gauge_point1.csv if <out_name> not supplied,
             myfile_2_point1.csv if <out_name> == 'myfile_2_'.
        They all have a header.

    Usage:
        sww2csv_gauges(sww_file='test1.sww',
                       quantities=['stage', 'elevation', 'depth', 'bearing'],
                       gauge_file='gauge.txt')

    Interpolate the quantities at a given set of locations, given
    an sww file.  The results are written to a csv file.

    In the future let points be a points file.
    And the user choose the quantities.

    This is currently quite specific.
    If it needs to be more general, change things.

    This is really returning speed, not velocity.
    """
    # NOTE(review): 'quantities' is a mutable default argument; this happens
    # to be safe here because the list is only rebound (lower-cased copy
    # below), never mutated in place -- but it is fragile.
    from csv import reader,writer
    from anuga.utilities.numerical_tools import ensure_numeric, mean, NAN
    import string    # NOTE(review): appears unused -- confirm before removing
    from anuga.utilities.file_utils import get_all_swwfiles
    from anuga.abstract_2d_finite_volumes.util import file_function

    assert isinstance(gauge_file,string_types) or isinstance(gauge_file, str), 'Gauge filename must be a string or unicode'
    assert isinstance(out_name,string_types) or isinstance(out_name, str), 'Output filename prefix must be a string'

    # First pass: only verifies that the gauge file can be opened and parsed
    try:
        gid = open(gauge_file)
        point_reader = reader(gid)
        gid.close()
    except Exception as e:
        msg = 'File "%s" could not be opened: Error="%s"' % (gauge_file, e)
        raise Exception(msg)

    if verbose: log.critical('Gauges obtained from: %s' % gauge_file)

    # Re-open for the actual read
    gid = open(gauge_file)
    point_reader = reader(gid)

    points = []         # [easting, northing] per gauge
    point_name = []     # gauge names, used to build the output file names

    # read point info from file
    for i,row in enumerate(point_reader):
        # read header and determine the column numbers to read correctly.
        if i==0:
            for j,value in enumerate(row):
                if value.strip()=='easting':easting=j
                if value.strip()=='northing':northing=j
                if value.strip()=='name':name=j
                if value.strip()=='elevation':elevation=j
        else:
            points.append([float(row[easting]),float(row[northing])])
            point_name.append(row[name])

    gid.close()

    # convert to array for file_function
    points_array = num.array(points,num.float)

    points_array = ensure_absolute(points_array)

    dir_name, base = os.path.split(sww_file)

    # need to get current directory so when path and file
    # are "joined" below the directory is correct
    if dir_name == '':
        dir_name =getcwd()

    if access(sww_file,R_OK):
        if verbose: log.critical('File %s exists' % sww_file)
    else:
        msg = 'File "%s" could not be opened: no read permission' % sww_file
        raise Exception(msg)

    sww_files = get_all_swwfiles(look_in_dir=dir_name,
                                 base_name=base,
                                 verbose=verbose)

    # fudge to get SWW files in 'correct' order, oldest on the left
    sww_files.sort()

    if verbose:
        log.critical('sww files=%s' % sww_files)

    # make all the quantities lower case for file_function
    quantities = [quantity.lower() for quantity in quantities]

    # Quantities needed from the sww file to calculate the
    # requested output quantities
    core_quantities = ['stage', 'elevation', 'xmomentum', 'ymomentum']

    # NOTE(review): from here on 'gauge_file' is repurposed as the output
    # filename prefix -- confusing but intentional
    gauge_file = out_name

    heading = [quantity for quantity in quantities]
    heading.insert(0,'time')
    heading.insert(1,'hours')

    if verbose: log.critical('Writing csv files')

    quake_offset_time = None

    # One flag per gauge: False until its csv file has been created
    is_opened = [False]*len(points_array)
    for sww_file in sww_files:
        sww_file = join(dir_name, sww_file+'.sww')
        callable_sww = file_function(sww_file,
                                     quantities=core_quantities,
                                     interpolation_points=points_array,
                                     verbose=verbose,
                                     use_cache=use_cache,
                                     output_centroids = output_centroids)

        # The first file's start time is used as the offset for all files
        if quake_offset_time is None:
            quake_offset_time = callable_sww.starttime

        for point_i, point in enumerate(points_array):
            for time in callable_sww.get_time():
                # add domain starttime to relative time.
                quake_time = time + quake_offset_time
                point_quantities = callable_sww(time, point_i)    # __call__ is overridden

                # NAN in the first quantity means this gauge is outside the
                # mesh at this time
                if point_quantities[0] != NAN:
                    # Open with 'w' (and write the header) the first time
                    # this gauge produces data, append thereafter
                    if is_opened[point_i] == False:
                        points_handle = open(dir_name + sep + gauge_file
                                             + point_name[point_i] + '.csv', 'w')
                        points_writer = writer(points_handle)
                        points_writer.writerow(heading)
                        is_opened[point_i] = True
                    else:
                        points_handle = open(dir_name + sep + gauge_file
                                             + point_name[point_i] + '.csv', 'a')
                        points_writer = writer(points_handle)

                    points_list = [quake_time, quake_time/3600.] \
                                  + _quantities2csv(quantities,
                                                    point_quantities,
                                                    callable_sww.centroids,
                                                    point_i)
                    points_writer.writerow(points_list)
                    points_handle.close()
                else:
                    if verbose:
                        # NOTE(review): message is missing spaces around the
                        # gauge name -- cosmetic only
                        msg = 'gauge' + point_name[point_i] + 'falls off the mesh in file ' + sww_file + '.'
                        log.warning(msg)
def sww2dem_batch(basename_in, extra_name_out=None,
                  quantities=None,              # defaults to elevation
                  reduction=None,
                  cellsize=10,
                  number_of_decimal_places=None,
                  NODATA_value=-9999,
                  easting_min=None,
                  easting_max=None,
                  northing_min=None,
                  northing_max=None,
                  verbose=False,
                  origin=None,
                  datum='WGS84',
                  format='ers'):
    """Run sww2dem over every sww file matching basename_in.

    See sww2dem for what most of the parameters do.  Because this is a
    batch command, the normal filename naming conventions do not apply:
    basename_in is a path to sww file/s without the .sww extension, and
    extra_name_out is a postfix added to each output filename.  Each
    quantity in 'quantities' is calculated for each sww file.

    Returns the list of produced file names (dir plus file name, without
    the extension) -- one output file per (sww file, quantity) pair.
    """

    if quantities is None:
        quantities = ['elevation']
    if type(quantities) is str:
        quantities = [quantities]

    # Locate every sww file that matches the given basename
    source_dir, base = os.path.split(basename_in)
    matching = get_all_swwfiles(source_dir, base, verbose)

    if source_dir == "":
        source_dir = "."            # Unix compatibility

    results = []
    for stem in matching:
        for quantity in quantities:
            # Output name is the pieces joined with underscores
            name_parts = [stem, quantity]
            if extra_name_out is not None:
                name_parts.append(extra_name_out)
            basename_out = '_'.join(name_parts)

            swwin = source_dir + os.sep + stem + '.sww'
            demout = source_dir + os.sep + basename_out + '.' + format

            if verbose:
                log.critical('sww2dem: %s => %s' % (swwin, demout))

            results.append(sww2dem(swwin, demout, quantity, reduction,
                                   cellsize, number_of_decimal_places,
                                   NODATA_value, easting_min, easting_max,
                                   northing_min, northing_max, verbose,
                                   origin, datum))

    return results
dir_name, base = os.path.split(sww_file) #need to get current directory so when path and file #are "joined" below the directory is correct if dir_name == '': dir_name =getcwd() if access(sww_file,R_OK): if verbose: log.critical('File %s exists' % sww_file) else: msg = 'File "%s" could not be opened: no read permission' % sww_file raise Exception(msg) sww_files = get_all_swwfiles(look_in_dir=dir_name, base_name=base, verbose=verbose) # fudge to get SWW files in 'correct' order, oldest on the left sww_files.sort() if verbose: log.critical('sww files=%s' % sww_files) #to make all the quantities lower case for file_function quantities = [quantity.lower() for quantity in quantities] # what is quantities are needed from sww file to calculate output quantities # also core_quantities = ['stage', 'elevation', 'xmomentum', 'ymomentum']
def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
                                use_centroid_values=True, verbose=False):
    """Compute maximum run up height from sww file.

    filename             path to SWW file to read
    polygon              if specified restrict to points inside this polygon
                         (assumed absolute coordinates and in same zone as
                         domain)
    time_interval        if specified restrict to within the period specified
    use_centroid_values  if True, reduce nodal x/y/elevation/stage to
                         per-triangle centroid averages before searching
    verbose              True if this function is to be verbose

    Returns (maximal_runup, maximal_runup_location).

    Usage:
        runup, location = get_maximum_inundation_data(filename,
                                                      polygon=None,
                                                      time_interval=None,
                                                      verbose=False)

    Algorithm is as in get_maximum_inundation_elevation from
    shallow_water_domain except that this function works with the SWW file
    and computes the maximal runup height over multiple timesteps.

    If no inundation is found within polygon and time_interval the return
    value is None signifying "No Runup" or "Everything is dry".
    """

    # We are using nodal values here as that is what is stored in sww files.

    # Water depth below which it is considered to be 0 in the model
    # FIXME (Ole): Allow this to be specified as a keyword argument as well
    from anuga.geometry.polygon import inside_polygon
    from anuga.config import minimum_allowed_height
    from anuga.file.netcdf import NetCDFFile

    dir, base = os.path.split(filename)    # NOTE(review): shadows builtin 'dir'
    iterate_over = get_all_swwfiles(dir, base)

    # NOTE(review): Python 2 print statement -- this is py2-era code
    if verbose: print iterate_over

    # Read sww file
    if verbose: log.critical('Reading from %s' % filename)
    # FIXME: Use general swwstats (when done)

    maximal_runup = None
    maximal_runup_location = None

    # Scan every matching sww file, keeping the largest runup seen so far
    for _, swwfile in enumerate(iterate_over):
        # Read sww file
        filename = os.path.join(dir, swwfile+'.sww')

        if verbose: log.critical('Reading from %s' % filename)
        # FIXME: Use general swwstats (when done)

        fid = NetCDFFile(filename)

        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference()    # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()

        # Get extent
        volumes = fid.variables['volumes'][:]
        x = fid.variables['x'][:] + xllcorner
        y = fid.variables['y'][:] + yllcorner

        # Get the relevant quantities (Convert from single precison)
        elevation = num.array(fid.variables['elevation'][:], num.float)
        stage = num.array(fid.variables['stage'][:], num.float)

        if verbose:
            print 'stage.shape ',stage.shape
            print 'elevation.shape ',elevation.shape

        # Here's where one could convert nodal information to centroid
        # information but is probably something we need to write in C.
        # Here's a Python thought which is NOT finished!!!
        if use_centroid_values is True:
            vols0=volumes[:,0]
            vols1=volumes[:,1]
            vols2=volumes[:,2]

            # Then use these to compute centroid averages
            x=(x[vols0]+x[vols1]+x[vols2])/3.0
            y=(y[vols0]+y[vols1]+y[vols2])/3.0

            elevation=(elevation[vols0]+elevation[vols1]+elevation[vols2])/3.0
            stage=(stage[:,vols0]+stage[:,vols1]+stage[:,vols2])/3.0

        # Spatial restriction
        if polygon is not None:
            msg = 'polygon must be a sequence of points.'
            assert len(polygon[0]) == 2, msg

            # FIXME (Ole): Make a generic polygon input check in polygon.py
            # and call it here
            points = num.ascontiguousarray(num.concatenate((x[:, num.newaxis],
                                                            y[:, num.newaxis]),
                                                           axis=1))
            point_indices = inside_polygon(points, polygon)

            # Restrict quantities to polygon
            elevation = num.take(elevation, point_indices, axis=0)
            stage = num.take(stage, point_indices, axis=1)

            # Get info for location of maximal runup
            points_in_polygon = num.take(points, point_indices, axis=0)

            x = points_in_polygon[:,0]
            y = points_in_polygon[:,1]
        else:
            # Take all points
            point_indices = num.arange(len(x))

        # Temporal restriction
        time = fid.variables['time'][:]
        if verbose: print time
        all_timeindices = num.arange(len(time))

        if time_interval is not None:
            msg = 'time_interval must be a sequence of length 2.'
            assert len(time_interval) == 2, msg
            msg = 'time_interval %s must not be decreasing.' % time_interval
            assert time_interval[1] >= time_interval[0], msg
            # NOTE(review): message text 'must does not match' is garbled
            msg = 'Specified time interval [%.8f:%.8f] ' % tuple(time_interval)
            msg += 'must does not match model time interval: [%.8f, %.8f]\n' \
                   % (time[0], time[-1])
            if time_interval[1] < time[0]:
                fid.close()
                raise ValueError(msg)
            if time_interval[0] > time[-1]:
                fid.close()
                raise ValueError(msg)

            # Take time indices corresponding to interval (& is bitwise AND)
            timesteps = num.compress((time_interval[0] <= time) \
                                     & (time <= time_interval[1]),
                                     all_timeindices)

            # NOTE(review): this assertion also fires when the only selected
            # timestep is index 0; 'len(timesteps) > 0' looks like the
            # intent -- confirm
            msg = 'time_interval %s did not include any model timesteps.' \
                  % time_interval
            assert not num.alltrue(timesteps == 0), msg
        else:
            # Take them all
            timesteps = all_timeindices

        fid.close()

        # Compute maximal runup for each timestep
        for i in timesteps:
            stage_i = stage[i,:]
            depth = stage_i - elevation

            if verbose: print '++++++++'

            # Get wet nodes i.e. nodes with depth>0 within given region
            # and timesteps
            wet_nodes = num.where(depth > 0.0)[0]

            if verbose:
                print stage_i.shape
                print num.max(stage_i)

            # NOTE(review): this is also True when the single wet node has
            # index 0 (not only when there are no wet nodes) -- probably
            # should be 'len(wet_nodes) == 0'; confirm
            if num.alltrue(wet_nodes == 0):
                runup = None
            else:
                # Find maximum elevation among wet nodes
                wet_elevation = num.take(elevation, wet_nodes, axis=0)

                if verbose:
                    pass

                runup_index = num.argmax(wet_elevation)
                runup = max(wet_elevation)
                if verbose: print 'max(wet_elevation) ',max(wet_elevation)
                assert wet_elevation[runup_index] == runup    # Must be True

            # In Python 2, 'number > None' is True, so this also works when
            # maximal_runup (or runup) is still None
            if runup > maximal_runup:
                maximal_runup = runup    # works even if maximal_runup is None

                # Record location
                wet_x = num.take(x, wet_nodes, axis=0)
                wet_y = num.take(y, wet_nodes, axis=0)
                maximal_runup_location = [wet_x[runup_index], \
                                          wet_y[runup_index]]

            if verbose: print i, runup

    return maximal_runup, maximal_runup_location
dir_name, base = os.path.split(sww_file) #need to get current directory so when path and file #are "joined" below the directory is correct if dir_name == '': dir_name = getcwd() if access(sww_file, R_OK): if verbose: log.critical('File %s exists' % sww_file) else: msg = 'File "%s" could not be opened: no read permission' % sww_file raise Exception(msg) sww_files = get_all_swwfiles(look_in_dir=dir_name, base_name=base, verbose=verbose) # fudge to get SWW files in 'correct' order, oldest on the left sww_files.sort() if verbose: log.critical('sww files=%s' % sww_files) #to make all the quantities lower case for file_function quantities = [quantity.lower() for quantity in quantities] # what is quantities are needed from sww file to calculate output quantities # also core_quantities = ['stage', 'elevation', 'xmomentum', 'ymomentum']
def get_maximum_inundation_data(filename, polygon=None, time_interval=None,
                                use_centroid_values=True, verbose=False):
    """Compute maximum run up height from sww file.

    filename             path to SWW file to read (all matching sww files in
                         the same directory are processed)
    polygon              if specified restrict to points inside this polygon
                         (assumed absolute coordinates and in same zone as
                         domain)
    time_interval        if specified restrict to within the period specified
    use_centroid_values  if True, reduce nodal x/y/elevation/stage to
                         per-triangle centroid averages before searching
    verbose              True if this function is to be verbose

    Returns (maximal_runup, maximal_runup_location).

    Usage:
        runup, location = get_maximum_inundation_data(filename,
                                                      polygon=None,
                                                      time_interval=None,
                                                      verbose=False)

    Algorithm is as in get_maximum_inundation_elevation from
    shallow_water_domain except that this function works with the SWW file
    and computes the maximal runup height over multiple timesteps.

    If no inundation is found within polygon and time_interval the return
    value is None signifying "No Runup" or "Everything is dry".
    """

    # We are using nodal values here as that is what is stored in sww files.

    # Water depth below which it is considered to be 0 in the model
    # FIXME (Ole): Allow this to be specified as a keyword argument as well
    from anuga.geometry.polygon import inside_polygon
    from anuga.config import minimum_allowed_height
    from anuga.file.netcdf import NetCDFFile

    # 'look_in_dir' rather than 'dir' to avoid shadowing the builtin
    look_in_dir, base = os.path.split(filename)
    iterate_over = get_all_swwfiles(look_in_dir, base)

    # Use log.critical (as the rest of this function does) instead of the
    # former py2-only print statements
    if verbose:
        log.critical('Iterating over: %s' % str(iterate_over))

    maximal_runup = None
    maximal_runup_location = None

    # Scan every matching sww file, keeping the largest runup seen so far
    for swwfile in iterate_over:
        # Read sww file
        filename = os.path.join(look_in_dir, swwfile + '.sww')

        if verbose:
            log.critical('Reading from %s' % filename)
        # FIXME: Use general swwstats (when done)

        fid = NetCDFFile(filename)

        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference()    # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()

        # Get extent
        volumes = fid.variables['volumes'][:]
        x = fid.variables['x'][:] + xllcorner
        y = fid.variables['y'][:] + yllcorner

        # Get the relevant quantities (Convert from single precision)
        elevation = num.array(fid.variables['elevation'][:], num.float)
        stage = num.array(fid.variables['stage'][:], num.float)

        if verbose:
            log.critical('stage.shape %s' % str(stage.shape))
            log.critical('elevation.shape %s' % str(elevation.shape))

        # Convert nodal information to centroid averages
        if use_centroid_values is True:
            vols0 = volumes[:, 0]
            vols1 = volumes[:, 1]
            vols2 = volumes[:, 2]

            x = (x[vols0] + x[vols1] + x[vols2]) / 3.0
            y = (y[vols0] + y[vols1] + y[vols2]) / 3.0

            elevation = (elevation[vols0] + elevation[vols1]
                         + elevation[vols2]) / 3.0
            stage = (stage[:, vols0] + stage[:, vols1]
                     + stage[:, vols2]) / 3.0

        # Spatial restriction
        if polygon is not None:
            msg = 'polygon must be a sequence of points.'
            assert len(polygon[0]) == 2, msg

            # FIXME (Ole): Make a generic polygon input check in polygon.py
            # and call it here
            points = num.ascontiguousarray(
                num.concatenate((x[:, num.newaxis], y[:, num.newaxis]),
                                axis=1))
            point_indices = inside_polygon(points, polygon)

            # Restrict quantities to polygon
            elevation = num.take(elevation, point_indices, axis=0)
            stage = num.take(stage, point_indices, axis=1)

            # Get info for location of maximal runup
            points_in_polygon = num.take(points, point_indices, axis=0)

            x = points_in_polygon[:, 0]
            y = points_in_polygon[:, 1]
        else:
            # Take all points
            point_indices = num.arange(len(x))

        # Temporal restriction
        time = fid.variables['time'][:]
        if verbose:
            log.critical('time=%s' % str(time))
        all_timeindices = num.arange(len(time))

        if time_interval is not None:
            msg = 'time_interval must be a sequence of length 2.'
            assert len(time_interval) == 2, msg
            msg = 'time_interval %s must not be decreasing.' % time_interval
            assert time_interval[1] >= time_interval[0], msg
            msg = 'Specified time interval [%.8f:%.8f] ' % tuple(time_interval)
            # Fixed garbled message text ('must does not match')
            msg += 'does not match model time interval: [%.8f, %.8f]\n' \
                   % (time[0], time[-1])
            if time_interval[1] < time[0]:
                fid.close()
                raise ValueError(msg)
            if time_interval[0] > time[-1]:
                fid.close()
                raise ValueError(msg)

            # Take time indices corresponding to interval (& is bitwise AND)
            timesteps = num.compress((time_interval[0] <= time)
                                     & (time <= time_interval[1]),
                                     all_timeindices)

            # Fixed: the old 'not num.alltrue(timesteps == 0)' misfired when
            # the only selected timestep was index 0; test emptiness instead
            msg = 'time_interval %s did not include any model timesteps.' \
                  % time_interval
            assert len(timesteps) > 0, msg
        else:
            # Take them all
            timesteps = all_timeindices

        fid.close()

        # Compute maximal runup for each timestep
        for i in timesteps:
            stage_i = stage[i, :]
            depth = stage_i - elevation

            # Get wet nodes, i.e. nodes with depth > 0, within given region
            # and timestep
            wet_nodes = num.where(depth > 0.0)[0]

            # Fixed: the old 'num.alltrue(wet_nodes == 0)' was also True when
            # the single wet node had index 0; test emptiness explicitly
            if len(wet_nodes) == 0:
                runup = None
            else:
                # Find maximum elevation among wet nodes
                wet_elevation = num.take(elevation, wet_nodes, axis=0)
                runup_index = num.argmax(wet_elevation)
                runup = max(wet_elevation)
                assert wet_elevation[runup_index] == runup    # Must be True

            # Explicit None handling instead of relying on the Python 2
            # 'number > None' comparison (which breaks under Python 3)
            if runup is not None and \
               (maximal_runup is None or runup > maximal_runup):
                maximal_runup = runup

                # Record location
                wet_x = num.take(x, wet_nodes, axis=0)
                wet_y = num.take(y, wet_nodes, axis=0)
                maximal_runup_location = [wet_x[runup_index],
                                          wet_y[runup_index]]

            if verbose:
                log.critical('timestep %s: runup=%s' % (str(i), str(runup)))

    return maximal_runup, maximal_runup_location