def resample_array(input_data, input_lat, input_lon, input_dict=None):
    """
    Code snippets for resampling the input array on to a new grid, changing
    the missing value or reducing the data type or floating point precision
    of the array. Edit the components of this routine for your particular
    needs. Just check that input_data and output_data arrays are being used
    and referenced as required.
    """
    # Check missing value
    # Some files gave an incorrect missing value, so we check for the
    # actual missing value before using/replacing it.
    checkfor = 99999.9
    if np.sum(np.where(input_data == checkfor, 1, 0)) > input_data.size * 0.7:
        input_dict.update({'missing': checkfor})

    # Copy dict
    if input_dict is None:
        output_dict = dict()
    else:
        output_dict = copy.deepcopy(input_dict)

    # Output dimensions
    (xs, xn, xc) = (112, 841, 0.05)  # start, number, cellsize
    (ys, yn, yc) = (-44, 681, 0.05)  # start, number, cellsize
    output_lon = nr.create_vector(xs, xn, xc)
    output_lat = nr.create_vector(ys, yn, yc)
    output_lat = output_lat[::-1]  # reverse elements

    # Create output array
    output_data = np.zeros((output_lat.size, output_lon.size)) \
                  + input_dict['missing']

    # Copy data onto output grid
    output_data = nr.copy_grids(input_data[0, :, :], input_lon, input_lat,
                                output_data, output_lon, output_lat)
    output_data.shape = (1, yn, xn)
    output_dict.update({'xmin': min(output_lon),
                        'xmax': max(output_lon),
                        'xstep': xc,
                        'xnum': xn,
                        'ymin': min(output_lat),
                        'ymax': max(output_lat),
                        'ystep': yc,
                        'ynum': yn})

    # Reduce precision of values to 1 decimal place and convert to f32
    output_data = output_data.round(decimals=1)
    output_data = np.float32(output_data)
    output_dict.update({'datatype': 'f4'})

    # Change missing value
    miss = -999.0
    if input_dict['missing'] != miss:
        output_data = nr.replace_values(output_data, input_dict['missing'], miss)
        output_dict.update({'missing': miss})

    return (output_data, output_lat, output_lon, output_dict)

def resample_array(input_data, input_lat, input_lon, input_dict=None):
    """
    Code snippets for resampling the input array on to a new grid, changing
    the missing value or reducing the data type or floating point precision
    of the array. Edit the components of this routine for your particular
    needs. Just check that input_data and output_data arrays are being used
    and referenced as required.
    """
    # Copy dict
    if input_dict is None:
        output_dict = dict()
    else:
        output_dict = copy.deepcopy(input_dict)

    # Output dimensions
    (xs, xn, xc) = (112, 841, 0.05)  # start, number, cellsize
    (ys, yn, yc) = (-44, 681, 0.05)  # start, number, cellsize
    output_lon = nr.create_vector(xs, xn, xc)
    output_lat = nr.create_vector(ys, yn, yc)
    output_lat = output_lat[::-1]  # reverse elements

    # Create output array
    output_data = np.zeros((output_lat.size, output_lon.size)) \
                  + input_dict['missing']

    # Copy data onto output grid
    output_data = nr.copy_grids(input_data, input_lon, input_lat,
                                output_data, output_lon, output_lat)
    output_dict.update({'xmin': min(output_lon),
                        'xmax': max(output_lon),
                        'xstep': xc,
                        'xnum': xn,
                        'ymin': min(output_lat),
                        'ymax': max(output_lat),
                        'ystep': yc,
                        'ynum': yn})

    # Reduce precision of values to 1 decimal place and convert to f32
    # (operate on output_data, not input_data, so the resampled grid is kept)
    output_data = output_data.round(decimals=1)
    output_data = np.float32(output_data)
    output_dict.update({'datatype': 'f4'})

    # Change missing value
    miss = -999
    output_data = nr.replace_values(output_data, input_dict['missing'], miss)
    output_dict.update({'missing': miss})

    return (output_data, output_lat, output_lon, output_dict)

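# The helpers from the 'nr' (numpy_routines) module used throughout these
# snippets are not defined in this file. As a rough sketch of what the call
# sites above appear to assume (signatures inferred from usage, not the real
# implementations; relies on the module-level 'import numpy as np'):

def _sketch_create_vector(start, num, step):
    # 'num' evenly spaced coordinate values beginning at 'start'
    return start + np.arange(int(num)) * step

def _sketch_replace_values(data, oldval, newval):
    # copy of 'data' with every occurrence of 'oldval' replaced by 'newval'
    return np.where(data == oldval, newval, data)

# Usage sketch for resample_array() (assumed workflow, not part of the
# original code): the tuple returned by resample_data() further below matches
# the arguments resample_array() takes, so the two can be chained, e.g.
#
#   data, lat, lon, info = resample_data(datarows, metalist)
#   data, lat, lon, info = resample_array(data, lat, lon, info)
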
def make_latlon_vectors(meta):
    """
    Create latitude and longitude vectors from the values contained in the
    Arc header dictionary, meta.
    Returned as a tuple of (latitude, longitude).
    """
    # Convert corners to centers if required
    if 'xllcorner' in meta:
        meta['xllcenter'] = float(meta['xllcorner']) + float(meta['cellsize'])/2
    if 'yllcorner' in meta:
        meta['yllcenter'] = float(meta['yllcorner']) + float(meta['cellsize'])/2

    # Create vectors
    input_lon = nr.create_vector(float(meta['xllcenter']),
                                 float(meta['ncols']),
                                 float(meta['cellsize']))
    input_lat = nr.create_vector(float(meta['yllcenter']),
                                 float(meta['nrows']),
                                 float(meta['cellsize']))
    input_lat = input_lat[::-1]  # reverse elements

    return input_lat, input_lon

def set_latlon(meta, datadict=None):
    """
    Create latitude and longitude vectors from the values contained in the
    Arc header dictionary, meta.
    Returned as a tuple of (latitude, longitude, datadict).
    """
    # Convert corners to centers if required
    if 'xllcorner' in meta:
        meta['xllcenter'] = float(meta['xllcorner']) + float(meta['cellsize'])/2
    if 'yllcorner' in meta:
        meta['yllcenter'] = float(meta['yllcorner']) + float(meta['cellsize'])/2

    # Create vectors
    lonvec = nr.create_vector(float(meta['xllcenter']),
                              float(meta['ncols']),
                              float(meta['cellsize']))
    latvec = nr.create_vector(float(meta['yllcenter']),
                              float(meta['nrows']),
                              float(meta['cellsize']))
    latvec = latvec[::-1]  # reverse elements

    # Create/update a datadict, which includes standardised labels for later
    if datadict is None:
        datadict = dict()
    datadict.update({'xmin': min(lonvec),
                     'xmax': max(lonvec),
                     'xstep': float(meta['cellsize']),
                     'xnum': int(meta['ncols']),
                     'xunits': 'degrees_east',
                     'ymin': min(latvec),
                     'ymax': max(latvec),
                     'ystep': float(meta['cellsize']),
                     'ynum': int(meta['nrows']),
                     'yunits': 'degrees_north'})

    # Return a tuple
    return latvec, lonvec, datadict

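# Example use of set_latlon() with hypothetical Arc header values (the keys
# mirror those parsed from .hdr/.asc headers elsewhere in these snippets; the
# numbers are illustrative only):
#
#   meta = {'ncols': '841', 'nrows': '681', 'xllcorner': '111.975',
#           'yllcorner': '-44.025', 'cellsize': '0.05'}
#   latvec, lonvec, datadict = set_latlon(meta)
#
# latvec runs north to south, and datadict carries the xmin/xmax/ymin/ymax
# extents, step sizes, counts and units for later use.
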
def test_spatial():
    import numpy_routines as npr
    xvec = npr.create_vector(115, 10, 5)

    # If ref only: min = max = index(ref)
    print 'ref in: want 3,3:', \
        values_to_indices_spatial(xvec, ref=130.2)
    print 'ref dn: want 0,0:', \
        values_to_indices_spatial(xvec, ref=100.2)
    print 'ref up: want 9,9:', \
        values_to_indices_spatial(xvec, ref=230.2)

    # If less only: min = index(less), max = len(vec)-1
    print 'ref in: want 3,9:', \
        values_to_indices_spatial(xvec, less=130.2)
    print 'ref dn: want 0,9:', \
        values_to_indices_spatial(xvec, less=100.2)
    print 'ref up: want 9,9:', \
        values_to_indices_spatial(xvec, less=230.2)

    # If more only: min = 0, max = index(more)
    print 'ref in: want 0,3:', \
        values_to_indices_spatial(xvec, more=130.2)
    print 'ref dn: want 0,0:', \
        values_to_indices_spatial(xvec, more=100.2)
    print 'ref up: want 0,9:', \
        values_to_indices_spatial(xvec, more=230.2)

    # If ref and less: min = index(ref-less), max = len(vec)-1
    print 'ref-less in: want 2,9:', \
        values_to_indices_spatial(xvec, ref=130.2, less=750, lessunit='km')

    # If ref and more: min = 0, max = index(ref+more)
    print 'ref+more in: want 0,5:', \
        values_to_indices_spatial(xvec, ref=130.2, more=750, moreunit='km')

    # If less and more: min = index(less), max = index(more)
    print 'less,more in: want 3,7:', \
        values_to_indices_spatial(xvec, less=130.2, more=148)

    # If ref, less and more: min = index(ref-less), max = index(ref+more)
    print 'ref-less,ref+more in: want 2,5:', \
        values_to_indices_spatial(xvec, ref=130.2, less=750, lessunit='km',
                                  more=750, moreunit='km')

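# For reference (assuming create_vector behaves as sketched above), the test
# vector npr.create_vector(115, 10, 5) is
# [115, 120, 125, 130, 135, 140, 145, 150, 155, 160],
# so ref=130.2 snaps to index 3, more=148 snaps to index 7, and (per the
# expected outputs above) a 750 km offset either side of ref=130.2 spans
# roughly indices 2 to 5.
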
def resample_data(datarows, metalist):
    """
    Convert a list of lists of (string) values to a 2D NumPy array.
    Create dimension vectors based on extent and cell-size information in
    'metalist'. Create target dimension vectors based on pre-defined extents.
    Check the correctness of the given missing value (in metalist) against
    the data. Replace all missing values with a standard missing value.
    Copy data from the input array to an output array defined by the target
    dimension vectors. The precision of values in the output array is
    reduced to 1 decimal place (for consistency).
    Return the output array, target dimension vectors and a dict that gives
    the extents of the target dimension vectors and the new missing value.
    """
    # Parse metalist into a dict
    meta = {y[0].lower(): float(y[1]) for y in [x.split() for x in metalist]}
    miss = -999.0

    # ASCII dimensions
    input_lon = nr.create_vector(meta['xllcorner'] + meta['cellsize']/2.0,
                                 meta['ncols'],
                                 meta['cellsize'])
    input_lat = nr.create_vector(meta['yllcorner'] + meta['cellsize']/2.0,
                                 meta['nrows'],
                                 meta['cellsize'])
    input_lat = input_lat[::-1]  # reverse elements

    # Output dimensions - not remapping, so commented out
    #(xs, xn, xc) = (112, 841, 0.05)  # start, number, cellsize
    #(ys, yn, yc) = (-44, 681, 0.05)  # start, number, cellsize
    #output_lon = nr.create_vector(xs, xn, xc)
    #output_lat = nr.create_vector(ys, yn, yc)
    #output_lat = output_lat[::-1]  # reverse elements

    # Copy datarows into a 2D array
    input_data = np.array(datarows, dtype=np.float64)
    meta['nodata_value'] = \
        check_bom_missing(input_data[0, :], 99999.9, meta['nodata_value'])
    if meta['nodata_value'] != miss:
        input_data = nr.replace_values(input_data, meta['nodata_value'], miss)
        print "Replaced missing data %s with %s" % (meta['nodata_value'], miss)

    # Create output array
    #output_data = np.zeros((output_lat.size,output_lon.size))+miss

    # Copy data onto output grid
    #output_data = nr.copy_grids(input_data,input_lon,input_lat,
    #                            output_data,output_lon,output_lat)

    # Reduce precision of values to 1 decimal place and convert to f32
    #output_data = output_data.round(decimals=1)
    #output_data = np.float32(output_data)
    input_data = input_data.round(decimals=1)
    input_data = np.float32(input_data)

    input_dict = {'xmin': min(input_lon).round(decimals=3),
                  'xmax': max(input_lon).round(decimals=3),
                  'xstep': meta['cellsize'],
                  'xnum': meta['ncols'],
                  'xunits': 'degrees_east',
                  'ymin': min(input_lat).round(decimals=3),
                  'ymax': max(input_lat).round(decimals=3),
                  'ystep': meta['cellsize'],
                  'ynum': meta['nrows'],
                  'yunits': 'degrees_north',
                  'missing': miss}

    return (input_data, input_lat, input_lon, input_dict)

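# Sketch of the check_bom_missing() helper called above (assumed behaviour,
# inferred from the missing-value check in resample_array(): if the bulk of
# the sampled row holds the BoM sentinel value, treat that sentinel as the
# true missing value; otherwise trust the header's nodata_value). The real
# implementation is not in this file.

def _sketch_check_bom_missing(sample, checkfor, nodata_value):
    # e.g. sample = first data row, checkfor = 99999.9
    if np.sum(sample == checkfor) > sample.size * 0.7:
        return checkfor
    return nodata_value
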
def flttonc(fltstem, ncfile, varname, yyyymmdd, units=None):
    """Main function to process a binary flt and hdr with name FLTSTEM to
    netCDF file NCFILE with variable name VARNAME. Time is set to the date
    corresponding to YYYYMMDD. Units can be provided optionally.
    """
    # Use %m (month), not %M (minute), to parse a YYYYMMDD date string
    d = datetime.strptime(yyyymmdd, "%Y%m%d")

    # Read the metadata from the header
    meta = {}
    r = re.compile(r"^(\S+)\s+(\S+)$")
    f = open(fltstem + '.hdr', 'r')
    for line in f:
        line = line.strip()
        m = re.match(r, line)
        if m:
            if m.group(1).lower() == 'byteorder':
                meta[m.group(1).lower()] = m.group(2)
            else:
                meta[m.group(1).lower()] = float(m.group(2))
    f.close()

    # Get the data and shape it appropriately. reshape() returns a new array,
    # so assign the result; the grid is (time, lat, lon) = (1, nrows, ncols)
    # to match the dimensions set up for the netCDF file below.
    a = np.fromfile(fltstem + '.flt', dtype=np.float32)
    a = a.reshape(1, int(meta['nrows']), int(meta['ncols']))

    # Make the lon and lat coordinate variables
    if 'xllcenter' in meta:
        lon = nr.create_vector(meta['xllcenter'],
                               meta['ncols'],
                               meta['cellsize'])
    else:
        lon = nr.create_vector(meta['xllcorner'] + 0.5*meta['cellsize'],
                               meta['ncols'],
                               meta['cellsize'])
    if 'yllcenter' in meta:
        lat = nr.create_vector(meta['yllcenter'],
                               meta['nrows'],
                               meta['cellsize'])
    else:
        lat = nr.create_vector(meta['yllcorner'] + 0.5*meta['cellsize'],
                               meta['nrows'],
                               meta['cellsize'])

    # Reverse the latitude elements so they run from North to South
    lat = lat[::-1]

    # Make a :history global attribute
    history = []
    now = datetime.now()
    history.append("flttonc at %04d-%02d-%02dT%02d:%02d:%02d" %
                   (now.year, now.month, now.day,
                    now.hour, now.minute, now.second))
    history.append("Input file: " + fltstem + ".flt")
    attr = {}
    if 'nodata_value' in meta:
        attr[varname + ':_FillValue'] = meta['nodata_value']
    if units:
        attr[varname + ':units'] = units
    attr['history'] = '\n'.join(history)

    # Write the netCDF file
    ncobj = nh.nc3_open(ncfile, 'w')
    nh.nc3_set_timelatlon(ncobj, 1, len(lat), len(lon))
    nh.nc3_set_var(ncobj, varname)
    nh.nc3_set_var(ncobj, 'wgs84', dims=())  # Grid mapping container
    nh.nc3_add_time(ncobj, [d])
    nh.nc3_add_data(ncobj, 'latitude', lat)
    nh.nc3_add_data(ncobj, 'longitude', lon)
    nh.nc3_add_data(ncobj, varname, a)
    nh.nc3_set_attributes(ncobj, attr)
    nh.nc3_close(ncobj)

    return ncfile

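# Usage sketch for flttonc() (hypothetical file names and variable name):
#
#   flttonc('rain_20100101', 'rain_20100101.nc', 'rainfall', '20100101',
#           units='mm')
#
# reads rain_20100101.flt/.hdr and writes rain_20100101.nc containing a
# single time step dated 2010-01-01.
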