def __init__(self, infile, **kwargs):
    self.infile = infile
    self.__dict__.update(kwargs)

    # Read in the array
    print('Reading data from: %s...' % self.infile)
    if self.infile[-3:] == '.gz':
        LL, self.Zin = read_xyz_gz(self.infile)
    elif self.infile[-3:] in ['txt', 'dat']:
        LL, self.Zin = read_xyz(self.infile)
        self.Zin = np.ravel(self.Zin)
    elif self.infile[-3:] == 'shp':
        LL, self.Zin = readShpBathy(self.infile, FIELDNAME=self.shapefieldname)
    elif self.infile[-3:] == '.nc':
        self.loadnc()
        LL = self._returnXY(self.xgrd, self.ygrd)
        self.Zin = np.ravel(self.Zin)
    elif self.infile[-3:] in ['dem', 'asc']:
        xgrd, ygrd, self.Zin = readraster(self.infile)
        LL = self._returnXY(xgrd, ygrd)
        self.Zin = np.ravel(self.Zin)

    self.npt = len(LL)

    if self.convert2utm:
        # Convert the coordinates
        print('Transforming the coordinates to UTM...')
        self.XY = ll2utm(LL, self.utmzone, self.CS, self.isnorth)
    else:
        self.XY = LL

    self._returnNonNan()
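# A minimal usage sketch for the file-loading constructor above. The class
# name BathyData is an assumption (whatever class owns this __init__), and
# the file name is hypothetical; the keyword names come straight from the
# attributes the constructor reads (convert2utm, utmzone, CS, isnorth).
def _example_load_bathy():
    bathy = BathyData('galveston_bathy.xyz.gz',
                      convert2utm=True, utmzone=15, CS='NAD83', isnorth=True)
    print('%d input points loaded' % bathy.npt)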
def __init__(self, bbox, dx, dy, **kwargs):
    self.__dict__.update(kwargs)

    # Generate the grid
    xy0 = ll2utm([bbox[0], bbox[2]], self.utmzone, self.CS, self.isnorth)
    xy1 = ll2utm([bbox[1], bbox[3]], self.utmzone, self.CS, self.isnorth)
    self.x0 = xy0[0, 0]
    self.y0 = xy0[0, 1]
    self.x1 = xy1[0, 0]
    self.y1 = xy1[0, 1]
    self.dx = dx
    self.dy = dy

    xgrd = np.arange(self.x0, self.x1, dx)
    ygrd = np.arange(self.y0, self.y1, dy)
    self.nx = len(xgrd)
    self.ny = len(ygrd)
    self.npts = self.nx * self.ny

    self.X, self.Y = np.meshgrid(xgrd, ygrd)
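# A minimal usage sketch for the Grid constructor above, following the call
# Grid(bbox, dx, dy, ...) used in the multi-file loader below. The bounding
# box ordering [lonmin, lonmax, latmin, latmax] is inferred from how indices
# [0], [2] and [1], [3] are paired above; the values here are hypothetical.
def _example_grid():
    grd = Grid([-95.5, -94.0, 28.5, 30.0], 500., 500.,
               utmzone=15, CS='NAD83', isnorth=True)
    print('Grid: %d x %d = %d points' % (grd.nx, grd.ny, grd.npts))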
def __init__(self, **kwargs):
    self.__dict__.update(kwargs)

    # Check whether the input file is a list (multiple files) or a single file
    if not isinstance(self.infile, list):
        self.multifile = False

        # Read in the array
        print('Reading data from: %s...' % self.infile)
        if self.infile[-3:] == '.gz':
            LL, self.Zin = read_xyz_gz(self.infile)
        elif self.infile[-3:] == 'txt':
            LL, self.Zin = read_xyz(self.infile)
        elif self.infile[-3:] == 'shp':
            LL, self.Zin = readShpBathy(self.infile)
        elif self.infile[-3:] == 'dem':
            LL, self.Zin = readDEM(self.infile, True)
        elif self.infile[-3:] == '.nc':
            self.loadnc(fv=2)
            LL = self._returnXY()
            self.Zin = np.ravel(self.Zin)

        self.npt = len(self.Zin)

        if self.convert2utm:
            if self.bbox is None:
                # Work out the domain limits from the input file
                self.bbox = [LL[:, 0].min(), LL[:, 0].max(),
                             LL[:, 1].min(), LL[:, 1].max()]
            else:
                # Clip the points outside of the domain
                print('Clipping points outside of the bounding box...')
                LL = self.clipPoints(LL)

            # Convert the coordinates
            print('Transforming the coordinates to UTM...')
            self.XY = ll2utm(LL, self.utmzone, self.CS, self.isnorth)
        else:
            self.XY = LL
    else:
        # Multiple files
        self.multifile = True

    # Create the grid object
    self.grd = Grid(self.bbox, self.dx, self.dx,
                    utmzone=self.utmzone, CS=self.CS, isnorth=self.isnorth)
def narr2suntansrad(outfile, tstart, tend, bbox, utmzone):
    """
    Extracts NARR model data and converts it to a netcdf file format
    recognised by SUNTANS.

    This function extracts the shortwave and longwave radiation variables.
    """
    # Initialise the NARR object (for all variables)
    basefile = 'narr-b_221_'  # These variables are stored in the NARR-B files
    narr = getNARR(tstart, tend, bbox, basefile=basefile)

    # Lookup table that matches variables to names in the NARR file
    varlookup = {
        'Hsw_up': 'Upward_short_wave_radiation_flux',
        'Hsw_down': 'Downward_shortwave_radiation_flux',
        'Hlw_up': 'Upward_long_wave_radiation_flux',
        'Hlw_down': 'Downward_longwave_radiation_flux',
    }

    # Set some meta variables
    Nc = narr.nx * narr.ny
    nctime = convertTime(narr.time)

    meta = {
        'Hsw_up': {'long_name': 'Upward shortwave radiation',
                   'units': 'W m-2', 'scalefactor': 1.0, 'addoffset': 0.0},
        'Hsw_down': {'long_name': 'Downward shortwave radiation',
                     'units': 'W m-2', 'scalefactor': 1.0, 'addoffset': 0.0},
        'Hlw_up': {'long_name': 'Upward longwave radiation',
                   'units': 'W m-2', 'scalefactor': 1.0, 'addoffset': 0.0},
        'Hlw_down': {'long_name': 'Downward longwave radiation',
                     'units': 'W m-2', 'scalefactor': 1.0, 'addoffset': 0.0},
    }

    # Convert the coordinates to UTM
    ll = np.hstack((np.reshape(narr.lon, (Nc, 1)), np.reshape(narr.lat, (Nc, 1))))
    xy = ll2utm(ll, utmzone)

    # Loop through each variable and store it in a dictionary
    output = {}
    coords = {}
    for vv in varlookup.keys():
        data = narr(varlookup[vv])

        # Convert the units
        data = data * meta[vv]['scalefactor'] + meta[vv]['addoffset']

        output[vv] = {'Data': np.reshape(data, (narr.nt, Nc))}
        output[vv].update({'long_name': meta[vv]['long_name'],
                           'units': meta[vv]['units']})

        # Update the coordinates dictionary
        coords['x_' + vv] = xy[:, 0]
        coords['y_' + vv] = xy[:, 1]
        coords['z_' + vv] = narr.z * np.ones((Nc,))

    # Write to NetCDF
    write2NC(outfile, coords, output, nctime)
def oceanmodel2ic(self, ncfile, convert2utm=True, setUV=False, seth=False, name='HYCOM'):
    """
    Interpolate data from a downloaded netcdf file onto the initial condition arrays.
    """
    print('Loading initial condition data from ocean model netcdf file:\n\t%s...' % ncfile)

    # Get the temperature data and coordinate data
    temp, nc = get_metocean_local(ncfile, 'temp', name=name)

    # Convert to utm
    ll = np.vstack([nc.X.ravel(), nc.Y.ravel()]).T
    if convert2utm:
        xy = ll2utm(ll, self.utmzone, north=self.isnorth)
    else:
        xy = ll

    # Construct a 3D mask
    mask3d = temp.mask
    mask3d = mask3d[0, ...]
    mask3d = mask3d.reshape((nc.nz, xy.shape[0]))

    # Construct the 4D interpolation class
    F4d = Interp4D(xy[:, 0], xy[:, 1], nc.Z, nc.time,
                   self.xv, self.yv, self.z_r, self.time,
                   mask=mask3d, **self.interpdict)

    tempnew = F4d(temp)
    self.T[:] = tempnew

    salt, nc = get_metocean_local(ncfile, 'salt')
    saltnew = F4d(salt)
    self.S[:] = saltnew

    if seth:
        # Construct the 3D interpolation class for surface height
        ssh, nc = get_metocean_local(ncfile, 'ssh')
        mask2d = ssh.mask
        mask2d = mask2d[0, ...].ravel()

        F3d = Interp4D(xy[:, 0], xy[:, 1], None, nc.time,
                       self.xv, self.yv, None, self.time,
                       mask=mask2d, **self.interpdict)
        sshnew = F3d(ssh)
        self.h[:] = sshnew
def oceanmodel2bdy(self, ncfile, convert2utm=True, setUV=True, seth=True, name='HYCOM'):
    """
    Interpolate data from a downloaded netcdf file onto the open boundaries.
    """
    print('Loading boundary data from ocean model netcdf file:\n\t%s...' % ncfile)

    # Load the temperature and salinity data and coordinate data
    temp, nc = get_metocean_local(ncfile, 'temp', name=name)
    salt, nc = get_metocean_local(ncfile, 'salt')
    if setUV:
        u, nc = get_metocean_local(ncfile, 'u')
        v, nc = get_metocean_local(ncfile, 'v')

    # Convert to utm
    ll = np.vstack([nc.X.ravel(), nc.Y.ravel()]).T
    if convert2utm:
        xy = ll2utm(ll, self.utmzone, north=self.isnorth)
    else:
        xy = ll

    # Construct a 3D mask
    mask3d = temp.mask
    mask3d = mask3d[0, ...]
    mask3d = mask3d.reshape((nc.nz, xy.shape[0]))

    # Type 3 cells
    if self.N3 > 0:
        # Construct the 4D interpolation class
        F4d = Interp4D(xy[:, 0], xy[:, 1], nc.Z, nc.time,
                       self.xv, self.yv, self.z, self.time,
                       mask=mask3d, **self.interpdict)

        tempnew = F4d(temp)
        self.T[:] = tempnew

        saltnew = F4d(salt)
        self.S[:] = saltnew

        # Note: velocities are never set for type-3 cells

        if seth:
            # Construct the 3D interpolation class for surface height
            ssh, nc2d = get_metocean_local(ncfile, 'ssh')
            mask2d = ssh.mask
            mask2d = mask2d[0, ...].ravel()

            F3d = Interp4D(xy[:, 0], xy[:, 1], None, nc2d.time,
                           self.xv, self.yv, None, self.time,
                           mask=mask2d, **self.interpdict)
            sshnew = F3d(ssh)
            self.h[:] += sshnew

    # Type 2 cells: no free surface
    if self.N2 > 0:
        # Construct the 4D interpolation class
        F4d = Interp4D(xy[:, 0], xy[:, 1], nc.Z, nc.time,
                       self.xe, self.ye, self.z, self.time,
                       mask=mask3d, **self.interpdict)

        tempnew = F4d(temp)
        self.boundary_T[:] = tempnew

        saltnew = F4d(salt)
        self.boundary_S[:] = saltnew

        if setUV:
            unew = F4d(u)
            self.boundary_u[:] += unew

            vnew = F4d(v)
            self.boundary_v[:] += vnew
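# A minimal usage sketch for the two interpolation methods above. The 'ic'
# and 'bnd' arguments stand for pre-built initial-condition and boundary
# objects exposing the attributes these methods read (xv, yv, z_r/z, time,
# interpdict, ...); the netcdf file name is hypothetical.
def _example_oceanmodel_interp(ic, bnd):
    ic.oceanmodel2ic('hycom_gom.nc', convert2utm=True, seth=True, name='HYCOM')
    bnd.oceanmodel2bdy('hycom_gom.nc', convert2utm=True, setUV=True, seth=True)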
def interpWeatherStations(latlon, tstart, tend, dt, utmzone, dbfile,
                          maxgap=40, showplot=False):
    """ Temporally interpolate weather station data onto a specified time grid """

    # Convert to datetime format
    timestart = datetime.strptime(tstart, '%Y%m%d')
    timeend = datetime.strptime(tend, '%Y%m%d')

    # Create the time variable
    timeList = []
    tnow = timestart
    while tnow < timeend:
        timeList.append(tnow)
        tnow += timedelta(hours=dt)

    nctime = convertTime(timeList)
    ntime = len(timeList)

    varnames = ['Tair', 'Pair', 'Uwind', 'Vwind', 'RH', 'rain', 'cloud']

    coords = {}
    output = {}

    # Read in the semi-processed data
    for vv in varnames:
        print('Interpolating variable %s...' % vv)
        outvar = ['NetCDF_Filename', 'NetCDF_GroupID', 'StationName']
        tablename = 'observations'
        condition = 'Variable_Name = "%s" ' % vv + \
            'and time_start <= "%s" ' % datetime.strftime(timestart, '%Y-%m-%d %H:%M:%S') + \
            'and time_end >= "%s" ' % datetime.strftime(timeend, '%Y-%m-%d %H:%M:%S') + \
            'and lon_start >= %3.6f ' % latlon[0] + 'and lon_end <= %3.6f ' % latlon[1] + \
            'and lat_start >= %3.6f ' % latlon[2] + 'and lat_end <= %3.6f ' % latlon[3]

        data, query = netcdfio.queryNC(dbfile, outvar, tablename, condition)

        # Remove stations whose valid data do not span the output time range
        ii = 0
        for dd in data[:]:  # iterate over a copy since we pop from data
            ind = np.isfinite(np.ravel(dd[vv]))
            timenow = convertTime(dd['time'])
            timegood = timenow[ind]
            if nctime[0] <= timegood[0] or nctime[-1] >= timegood[-1]:
                data.pop(ii)
            else:
                ii += 1

        # Remove stations that have large gaps
        ii = 0
        for dd in data[:]:  # iterate over a copy since we pop from data
            ind = np.isfinite(dd[vv])
            timenow = convertTime(dd['time'])

            # Find the station-time indices bounding the output time range
            i = -1
            for t in timenow:
                i += 1
                if t < nctime[0]:
                    t1 = i
            i = -1
            for t in timenow:
                i += 1
                if t < nctime[-1]:
                    t2 = i

            # Find the maximum gap size between the two time limits
            gapsize = 0
            gap = 0
            for gg in ind[t1:t2]:
                if ~gg:
                    gap += 1
                    if gap > gapsize:
                        gapsize = gap
                else:
                    gap = 0

            if gapsize > maxgap:
                print('Removing data point - gap size %d is > %d' % (gapsize, maxgap))
                data.pop(ii)
            else:
                ii += 1

        # Work out the number of spatial points of each variable based on quality control
        coords['x_' + vv] = []
        coords['y_' + vv] = []
        coords['z_' + vv] = []
        for dd in data:
            # Convert to utm
            ll = np.hstack((dd['longitude'], dd['latitude']))
            xy = ll2utm(ll, utmzone)
            coords['x_' + vv].append(xy[0][0])
            coords['y_' + vv].append(xy[0][1])
            coords['z_' + vv].append(dd['elevation'])

        varlen = len(data)

        # Initialize the output arrays
        output[vv] = {'Data': np.zeros((ntime, varlen))}

        # Loop through and interpolate each variable onto the time array
        ctr = 0
        for dd in data:
            # Interpolate the data
            tmp = np.array(np.ravel(dd[vv]))
            timenow = convertTime(dd['time'])
            ind = np.isfinite(tmp)
            F = interpolate.interp1d(timenow[ind], tmp[ind], kind='linear')
            varinterp = F(nctime)

            output[vv]['Data'][:, ctr] = varinterp
            ctr += 1

            if showplot:
                plt.figure()
                plt.plot(timenow, tmp)
                plt.plot(nctime, varinterp, 'r')
                plt.title(dd['StationName'] + ' - ' + vv)
                plt.show()

    # Return the data
    return coords, output, nctime
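# A minimal usage sketch for interpWeatherStations. The database path and
# output file name are hypothetical; the bounding-box ordering
# [lonmin, lonmax, latmin, latmax] follows the SQL condition built above,
# and write2NC is the writer used by the NARR converters in this module.
def _example_interp_stations():
    latlon = [-95.5, -94.0, 28.5, 30.0]  # [lonmin, lonmax, latmin, latmax]
    coords, output, nctime = interpWeatherStations(
        latlon, '20100101', '20100201', 1.0, 15, 'weather_obs.db', maxgap=40)
    write2NC('met_stations.nc', coords, output, nctime)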
def narr2suntans(outfile, tstart, tend, bbox, utmzone):
    """
    Extracts NARR model data and converts it to a netcdf file format
    recognised by SUNTANS.
    """
    # Initialise the NARR object (for all variables)
    narr = getNARR(tstart, tend, bbox)

    # Lookup table that matches variables to names in the NARR file
    varlookup = {
        'Uwind': 'u_wind_height_above_ground',
        'Vwind': 'v_wind_height_above_ground',
        'Tair': 'Temperature_height_above_ground',
        'Pair': 'Pressure_reduced_to_MSL',
        'RH': 'Relative_humidity',
        'cloud': 'Total_cloud_cover',
        'rain': 'Precipitation_rate',
    }

    # Set some meta variables
    Nc = narr.nx * narr.ny
    nctime = convertTime(narr.time)

    meta = {
        'Uwind': {'long_name': 'Eastward wind velocity component',
                  'units': 'm s-1', 'scalefactor': 1.0, 'addoffset': 0.0},
        'Vwind': {'long_name': 'Northward wind velocity component',
                  'units': 'm s-1', 'scalefactor': 1.0, 'addoffset': 0.0},
        'Tair': {'long_name': 'Air Temperature',
                 'units': 'Celsius', 'scalefactor': 1.0, 'addoffset': -273.15},
        'Pair': {'long_name': 'Air Pressure',
                 'units': 'hPa', 'scalefactor': 0.01, 'addoffset': 0.0},
        'RH': {'long_name': 'Relative Humidity',
               'units': 'percent', 'scalefactor': 1.0, 'addoffset': 0.0},
        'cloud': {'long_name': 'Cloud cover fraction',
                  'units': 'dimensionless', 'scalefactor': 0.01, 'addoffset': 0.0},
        'rain': {'long_name': 'Rain fall rate',
                 'units': 'kg m-2 s-1', 'scalefactor': 1.0, 'addoffset': 0.0},
    }

    # Convert the coordinates to UTM
    ll = np.hstack((np.reshape(narr.lon, (Nc, 1)), np.reshape(narr.lat, (Nc, 1))))
    xy = ll2utm(ll, utmzone)

    # Loop through each variable and store it in a dictionary
    output = {}
    coords = {}

    # Get all data; this stores all of the data in a dictionary
    narrvars = list(varlookup.values())
    data = narr(narrvars)

    for vv in varlookup.keys():
        # Convert the units
        vnarr = varlookup[vv]
        data[vnarr] = data[vnarr] * meta[vv]['scalefactor'] + meta[vv]['addoffset']

        output[vv] = {'Data': np.reshape(data[vnarr], (narr.nt, Nc))}
        output[vv].update({'long_name': meta[vv]['long_name'],
                           'units': meta[vv]['units']})

        # Update the coordinates dictionary
        coords['x_' + vv] = xy[:, 0]
        coords['y_' + vv] = xy[:, 1]
        coords['z_' + vv] = narr.z * np.ones((Nc,))

    # Write to NetCDF
    write2NC(outfile, coords, output, nctime)
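# A minimal usage sketch for the two NARR converters, assuming UTM zone 15
# and a Gulf-coast bounding box; the output file names are hypothetical, and
# the 'YYYYMMDD' date-string format is an assumption carried over from
# interpWeatherStations above.
def _example_narr2suntans():
    bbox = [-95.5, -94.0, 28.5, 30.0]  # [lonmin, lonmax, latmin, latmax]
    narr2suntans('NARR_met.nc', '20100101', '20100201', bbox, 15)
    narr2suntansrad('NARR_rad.nc', '20100101', '20100201', bbox, 15)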
import numpy as np

from hybridgrid import HybridGrid
from gmsh import grd2suntans
from maptools import ll2utm

###
# Inputs
lon0 = -94.86367
lat0 = 29.29517
utmzone = 15
outpath = 'rundata'
nx = 4
###

xy = ll2utm(np.array((lon0, lat0)), utmzone)
x0 = xy[0, 0]
y0 = xy[0, 1]

# Create the cell outline
dx = 1000. * nx
dx2 = dx / 2.

xlims = [x0 - dx2, x0 + dx2]
ylims = [y0 - dx2, y0 + dx2]

xgrd = np.linspace(xlims[0], xlims[1], nx)
ygrd = np.linspace(ylims[0], ylims[1], nx)

cells = []
xp = []
yp = []
def build(self):
    tic = time.time()
    if not self.multifile:
        if self.interptype == 'nn':
            print('Building DEM with Nearest Neighbour interpolation...')
            self.nearestNeighbour()
        elif self.interptype == 'blockavg':
            print('Building DEM with Block Averaging...')
            self.blockAvg()
        elif self.interptype == 'idw':
            print('Building DEM with Inverse Distance Weighted Interpolation...')
            self.invdistweight()
        elif self.interptype == 'kriging':
            print('Building DEM with Kriging Interpolation...')
            self.krig()
        elif self.interptype == 'griddata':
            print('Building DEM using griddata...')
            self.griddata()
        else:
            print('Error - Unknown interpolation type: %s.' % self.interptype)
    else:
        # Multiple file interpolation
        print('Multiple input files detected - setting "interptype" to "blockavg".')
        self.interptype = 'blockavg'

        self.Z = np.zeros((self.grd.ny, self.grd.nx))
        self.N = np.zeros((self.grd.ny, self.grd.nx))

        ctr = 0
        for f in self.infile:
            ctr += 1
            # Read in the array
            print('Reading data file (%d of %d): %s...' % (ctr, len(self.infile), f))
            if f[-3:] == '.gz':
                LL, self.Zin = read_xyz_gz(f)
            elif f[-3:] == 'txt':
                LL, self.Zin = read_xyz(f)
            elif f[-3:] == 'shp':
                LL, self.Zin = readShpBathy(f)
            elif f[-3:] == 'dem':
                LL, self.Zin = readDEM(f, True)

            self.npt = len(self.Zin)

            if self.convert2utm:
                # Convert the coordinates
                print('Transforming the coordinates to UTM...')
                self.XY = ll2utm(LL, self.utmzone, self.CS, self.isnorth)
            else:
                self.XY = LL

            del LL

            # Interpolate
            print('Building DEM with Block Averaging...')
            self.blockAvgMulti()

            # Memory cleanup
            del self.XY
            del self.Zin

        # Compute the block average over all of the files
        self.Z = np.divide(self.Z, self.N)

    toc = time.time()
    print('Elapsed time %10.3f seconds.' % (toc - tic))
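# A minimal usage sketch of the multi-file DEM workflow above. The class name
# DEM is an assumption (whatever class holds the kwargs-based __init__ and
# build()); the file names, bounding box, and grid spacing are hypothetical,
# but the keyword names match the attributes the constructor and build() read.
def _example_dem_build():
    dem = DEM(infile=['survey1.txt', 'survey2.txt'],
              bbox=[-95.5, -94.0, 28.5, 30.0], dx=50.0,
              utmzone=15, CS='NAD83', isnorth=True,
              convert2utm=True, interptype='blockavg')
    dem.build()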