Example #1
    def save_results(self):
        if self.nc is None:
            return

        output_file = self.get_output()

        if output_file is None:
            print "No output file selected; cannot proceed."
            return

        print "Saving the mask to %s" % output_file

        nc = NC(output_file, 'w')

        nc.createDimension('x', self.x.size)
        nc.createDimension('y', self.y.size)

        x = nc.createVariable("x", 'f8', ('x',))
        y = nc.createVariable("y", 'f8', ('y',))
        mask = nc.createVariable("ftt_mask", 'i4', ('y', 'x'))

        mask.long_name = "Drainage basin area for regional modeling"

        x_orig = self.nc.variables['x']
        y_orig = self.nc.variables['y']

        for var, old_var in zip([x,y], [x_orig, y_orig]):
            for attr in old_var.ncattrs():
                var.setncattr(attr, old_var.getncattr(attr))

        x[:] = self.x
        y[:] = self.y

        nc.variables['ftt_mask'][:] = (self.mask == 2)
        nc.close()

        print "Done."
Example #2
                                     radius_of_influence=radius_of_influence,
                                     epsilon=0.5,
                                     fill_value=fill_value,
                                     nprocs=nprocs)

# binary density (0: no data, 1: data)
rho = np.ones_like(result)
rho[result==fill_value] = 0

# Make a quick plot
fig_str = out_filename.split('.')[0]
fig_name = fig_str + '.png'
pr.plot.save_quicklook(fig_name, area_def, result, label='ice thickness')

# Write the data:
nc = CDF(out_filename, "w", format="NETCDF3_CLASSIC")
nc.createDimension("y", size=northing.shape[0])
nc.createDimension("x", size=easting.shape[0])
x = nc.createVariable("x", 'f', dimensions=("x",))
x.units = "m"
x.long_name = "easting"
x.standard_name = "projection_x_coordinate"

y = nc.createVariable("y", 'f', dimensions=("y",))
y.units = "m"
y.long_name = "northing"
y.standard_name = "projection_y_coordinate"

mapping_var = 'mapping'
mapping = nc.createVariable(mapping_var, 'b')
mapping.grid_mapping_name = "polar_stereographic"
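
The example is cut off while the grid-mapping variable is being filled in. A hypothetical continuation is sketched below; the `thk` variable name and its attributes are assumptions rather than code from the original script.

# Hypothetical continuation (not part of the original example):
x[:] = easting
y[:] = northing

thk = nc.createVariable("thk", 'f', dimensions=("y", "x"))
thk.units = "m"
thk.long_name = "ice thickness"
thk.grid_mapping = mapping_var  # link the data variable to the mapping variable
thk[:] = result

nc.close()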
Example #3
do_bmelt = options.do_bmelt
parameters['allow_extrapolation'] = True
project_name = str(options.project_name)
grid_spacing = options.grid_spacing
# Misfit penalty: penalizes differences between the calculated and observed thickness.
gamma = options.gamma
# Regularization parameter (penalty on the gradient of the solution)
alpha = options.alpha
# Velocity scale
velocity_scale = options.velocity_scale
# minimum ice thickness
thk_min = 10

output_order = ("x", "y")
filename = project_name + '_flightlines_' + str(grid_spacing) + 'm.nc'
nc = CDF(filename, 'r')
xdim, ydim, zdim, tdim = get_dims(nc)
x = nc.variables[xdim][:]
y = nc.variables[ydim][:]
proj4_str = nc.projection
input_dimensions = nc.variables["thk"].dimensions
rho = np.squeeze(permute(nc.variables["rho"], output_order=output_order))
# rho may be a masked array, which dolfin cannot handle
# here we remove the mask
rho = np.array(rho)
Hfl = np.squeeze(permute(nc.variables["thk"], output_order=output_order))
M = len(x)
xmin = x[0]
xmax = x[-1]
N = len(y)
ymin = y[0]
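
The example is cut off at this point; by symmetry with the x bounds above, the next line is presumably:

ymax = y[-1]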
Example #4

import numpy as np

try:
    from netCDF3 import Dataset as CDF
except ImportError:
    from netCDF4 import Dataset as CDF

x, topg, thk = np.loadtxt('sg_35m_flowline.txt', unpack=True)

output = 'storglaciaren_flowline.nc'

# Write the data:
print("Writing the data to '%s'... " % output)
nc = CDF(output, "w")
nc.createDimension("x", size=len(x))

x_var = nc.createVariable("x", 'f', dimensions=("x", ))
x_var.units = "m"
x_var[:] = x

topg_var = nc.createVariable("topg", 'f', dimensions=("x", ))
topg_var.units = "m"
topg_var.standard_name = "bedrock_altitude"
topg_var[:] = topg

thk_var = nc.createVariable("thk", 'f', dimensions=("x", ))
thk_var.units = "m"
thk_var.standard_name = "land_ice_thickness"
thk_var[:] = thk
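
As scraped, the example never closes the dataset, and it is the close that flushes the buffers to disk. A minimal addition, assuming nothing else follows in the original script:

nc.close()  # not in the original snippet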
Example #5
except ImportError:
    from netCDF4 import Dataset as CDF
from argparse import ArgumentParser


# Set up the Argument parser
description = '''A script to add EPSG:3413 mapping information to a netCDF file.'''
parser = ArgumentParser()
parser.description = description
parser.add_argument("FILE", nargs='*')
options = parser.parse_args()
args = options.FILE

infile = args[0]

nc = CDF(infile, 'a')


mapping_var = 'mapping'
for var in nc.variables.values():
    # if a variable already references a grid mapping, reuse that name
    if hasattr(var, 'grid_mapping'):
        mapping_var = var.grid_mapping

if mapping_var not in nc.variables:
    mapping = nc.createVariable(mapping_var, 'b')

else:
    mapping = nc.variables[mapping_var]
mapping.grid_mapping_name = "polar_stereographic"
mapping.latitude_of_projection_origin = 90.
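
The example is cut off after the first two mapping attributes. A plausible continuation with the remaining EPSG:3413 parameters (standard values for that projection, not read from the original file) would be:

# Assumed continuation: remaining EPSG:3413 polar-stereographic parameters
mapping.straight_vertical_longitude_from_pole = -45.
mapping.standard_parallel = 70.
mapping.false_easting = 0.
mapping.false_northing = 0.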
Example #6
    mapping = [dimensions.index(x) for x in input_dimensions]

    if mapping:
        return np.transpose(variable[:], mapping)
    else:
        return variable[:]              # so that it does not break processing "mapping"


for NCNAME in NCNAMES:
    
    PREFIX, d = NCNAME.split('.')
    print("  opening NetCDF file %s ..." % NCNAME)
    try:
        # open the NetCDF file in read-only mode
        nc = NC(NCNAME, 'r')
    except:
        print("greenplot.py ERROR:  file '%s' not found or not NetCDF format ... ending ..." % NCNAME)
        exit(1)

    # we need to know the longitudes and latitudes corresponding to our grid
    lon = np.squeeze(permute(nc.variables['lon']))
    lat = np.squeeze(permute(nc.variables['lat']))
    # need thickness for some masking operations
    thk = np.squeeze(permute(nc.variables['thk']))

    # the surface can be drawn as a transparent (alpha < 1) surface when showing basal fields
    usurf = np.squeeze(permute(nc.variables['usurf']))
    musurf = np.ma.array(usurf, mask = (thk <= thk_min))

    # x and y *from the dataset* are only used to determine the plotting domain
Example #7
import os

from netCDF3 import Dataset
import numpy as np

in_file='/home/phil/public_html/courses/atsc500/code/matlab/BOMEX_256x256x150_25m_20m_2s_16_0000009240.nc'
out_file='subset.nc'
new_x=np.arange(128,178)
new_y=np.arange(128,198)
nc_in=Dataset(in_file)
try:
    nc_out=Dataset(out_file,'ws',format='NETCDF3_CLASSIC')
except:
    # the output file may already exist: remove it and try again
    os.unlink(out_file)
    nc_out=Dataset(out_file,'ws',format='NETCDF3_CLASSIC')

## copy attributes
for the_att in nc_in.ncattrs():
    setattr(nc_out,the_att,getattr(nc_in,the_att).strip())

##get one-d vars
npz_vars={}
one_d={}
out_vars={}
for the_dim in ['x','y','z']:
    one_d[the_dim]=nc_in.variables[the_dim]

npz_vars={}

sub_x=one_d['x'][new_x]
sub_y=one_d['y'][new_y]
sub_z=one_d['z'][:]
nc_out.createDimension('x',len(sub_x))
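
The example stops after the first dimension; the remaining dimensions would presumably be created the same way (a sketch, not part of the original):

nc_out.createDimension('y',len(sub_y))
nc_out.createDimension('z',len(sub_z))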
Example #8
parser.add_argument("FILE", nargs='*')
parser.add_argument("-n","--no_procs", dest="nprocs", type=int,
                  help='''No. of cores used for resamping.''', default=8)

options = parser.parse_args()
args = options.FILE
data_file = args[0]
if len(args) > 1:
    out_filename = args[1]
else:
    out_filename = "foo.nc"
nprocs = options.nprocs



nc = CDF(data_file, "r")
x_in = nc.variables['x'][:]
y_in = nc.variables['y'][:]
M_in = len(x_in)
N_in = len(y_in)
x_in_min = x_in[0]
x_in_max = x_in[-1]
y_in_min = y_in[0]
y_in_max = y_in[-1]
data = np.squeeze(nc.variables['mask'][:])
radius_of_influence = x_in[1] - x_in[0]  # m
nc.close()

area_id = 'greenland'
area_name = 'Greenland'
proj_id = 'Polar Stereo'
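
The example breaks off while the target grid is still being described. The sketch below shows one way these pieces could feed a pyresample area definition; the PROJ string, grid shape, and extent are placeholders rather than values from the original script, and it assumes a pyresample version whose geometry.AreaDefinition accepts a PROJ string directly.

# Hypothetical continuation (placeholder projection, shape, and extent):
import pyresample as pr

proj4_string = '+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +datum=WGS84'
area_extent = (x_in_min, y_in_min, x_in_max, y_in_max)
area_def = pr.geometry.AreaDefinition(area_id, area_name, proj_id,
                                      proj4_string, M_in, N_in, area_extent)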
Example #9

import numpy as np

try:
    from netCDF3 import Dataset as CDF
except ImportError:
    from netCDF4 import Dataset as CDF

x, topg, thk = np.loadtxt('sg_35m_flowline.txt', unpack=True)

output = 'storglaciaren_flowline.nc'

# Write the data:
print("Writing the data to '%s'... " % output)
nc = CDF(output, "w")
nc.createDimension("x", size=len(x))

x_var = nc.createVariable("x", 'f', dimensions=("x",))
x_var.units = "m"
x_var[:] = x

topg_var = nc.createVariable("topg", 'f', dimensions=("x",))
topg_var.units = "m"
topg_var.standard_name = "bedrock_altitude"
topg_var[:] = topg

thk_var = nc.createVariable("thk", 'f', dimensions=("x",))
thk_var.units = "m"
thk_var.standard_name = "land_ice_thickness"
thk_var[:] = thk
Example #10
from netCDF3 import Dataset
from scipy.interpolate import UnivariateSpline
import matplotlib.pyplot as plt
import numpy as np

filename = 'soundings.nc'
nc_file=Dataset(filename)
var_names=nc_file.variables.keys()
print "variable names: ",var_names
print "global attributes: ",nc_file.ncattrs()
#print "col_names: ",nc_file.col_names  This line gives me an error

fig=plt.figure(1)
fig.clf()
ax1=fig.add_subplot(111)
z_interp=np.arange(2000.,25000.,100.)
Temp_array = np.zeros(len(z_interp)) #initial array to start stacking Temperatures
for var_name,the_var in nc_file.variables.items():
     print var_name, the_var
     interp_temp=UnivariateSpline(the_var[:,1],the_var[:,2])  # note that Feb-28-2012-00z only goes up to 11820
     Temp_array = np.vstack((Temp_array, interp_temp(z_interp))) #stacking Temperatures
     ax1.plot(interp_temp(z_interp),z_interp)
     fig.canvas.draw()
     plt.title('Interpolated Temperatures vs Height')
     plt.xlabel('Temperature(C)')
     plt.ylabel('Height(m)')
plt.show()

Temp_array = np.delete(Temp_array, 0, 0) #deleting initial row of zeros
Example #11
picfile=open(picfile,'rb')  # open in binary mode for pickle
the_data=pickle.load(picfile)

picfile.close()
newDict=the_data['data']

array_list=[]
for key,value in newDict.items():
    the_date=dateutil.parser.parse(key)
    array_list.append((the_date,key,value))

array_list.sort()
the_dates=[item[1] for item in array_list]

try:
    ncfile=Dataset('soundings.nc','ws',format='NETCDF3_CLASSIC')
except:
    os.unlink('output.nc')
    ncfile=Dataset('output.nc','ws',format='NETCDF3_CLASSIC')

keep_arrays=list()
array_lengths=list()
for dt_object,date_name,the_array in array_list[6:11]:
    np_array=np.frombuffer(the_array.data)
    n_cols=len(the_array.dtype)
    np_array=np.reshape(np_array,(-1,n_cols))
    keep_arrays.append((np_array.shape[0],np_array,date_name))
    array_lengths.append(np_array.shape[0])

def add_dim(ncfile,dim_length,dim_name=None):
    if dim_name is None:
Example #12
from optparse import OptionParser

parser = OptionParser()
parser.usage = "usage: %prog [options] FILE"
parser.description = "A script to compare PISM flowline velocities with full Stokes solution."

(options, args) = parser.parse_args()

plot_acab = True

if len(args) != 1:
    print('wrong number of arguments, 1 expected')
    exit(1)

try:
    nc = Dataset(args[0], 'r')
except:
    print("file %s not found ... ending ..." % args[0])
    exit(2)


def permute(variable, output_order=('t', 'z', 'zb', 'y', 'x')):
    """Permute dimensions of a NetCDF variable to match the output storage order."""
    input_dimensions = variable.dimensions

    # filter out irrelevant dimensions
    dimensions = [d for d in output_order if d in input_dimensions]

    # create the mapping
    mapping = [dimensions.index(d) for d in input_dimensions]
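
The snippet is truncated here; based on the same permute helper shown in Example #6, the function presumably ends with:

    # presumed continuation, mirroring Example #6 (not part of this snippet)
    if mapping:
        return np.transpose(variable[:], mapping)
    else:
        return variable[:]              # so that it does not break processing "mapping"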