def read_data(path):
    CMIP_label = dt(path + '/enso_round1_train_20210201/CMIP_label.nc', 'r')
    CMIP_train = dt(path + '/enso_round1_train_20210201/CMIP_train.nc', 'r')
    SODA_label = dt(path + '/enso_round1_train_20210201/SODA_label.nc', 'r')
    SODA_train = dt(path + '/enso_round1_train_20210201/SODA_train.nc', 'r')
    return CMIP_label.variables, CMIP_train.variables, SODA_label.variables, SODA_train.variables
def ROMS_CV_Load(RomsFile, VarName, IndBounds):
    """Loads ROMS variables in a control volume defined by lat and lon bounds:
    lat_rho/psi, lon_rho/psi, time, u, ubar, v, vbar, w, and a defined variable
    at the defined latitude and longitude. Stores them in a dictionary."""
    # load nc file
    RomsNC = dt(RomsFile, 'r')

    # index ranges for rho points (cell centers) and psi points (cell edges)
    rho_lat = slice(IndBounds['Rho']['lat_li'], IndBounds['Rho']['lat_ui'])
    rho_lon = slice(IndBounds['Rho']['lon_li'], IndBounds['Rho']['lon_ui'])
    psi_lat = slice(IndBounds['Psi']['lat_li'], IndBounds['Psi']['lat_ui'])
    psi_lon = slice(IndBounds['Psi']['lon_li'], IndBounds['Psi']['lon_ui'])

    # subset variables in control volume and store in dictionary
    ROMS_CV = {
        'lat_rho': np.array(RomsNC.variables['lat_rho'][rho_lat, rho_lon], dtype=np.float64),
        'lon_rho': np.array(RomsNC.variables['lon_rho'][rho_lat, rho_lon], dtype=np.float64),
        'time': np.array(RomsNC.variables['ocean_time'][:], dtype=np.float64),
        # 'w': np.array(RomsNC.variables['w'][:, :, rho_lat, rho_lon], dtype=np.float64),
        VarName: np.array(RomsNC.variables[VarName][:, :, rho_lat, rho_lon], dtype=np.float64),
        # psi variables (on edges)
        'lat_psi': np.array(RomsNC.variables['lat_psi'][psi_lat, psi_lon], dtype=np.float64),
        'lon_psi': np.array(RomsNC.variables['lon_psi'][psi_lat, psi_lon], dtype=np.float64),
        'u': np.array(RomsNC.variables['u'][:, :, psi_lat, psi_lon], dtype=np.float64),
        # 'ubar': np.array(RomsNC.variables['ubar'][:, psi_lat, psi_lon], dtype=np.float64),
        'v': np.array(RomsNC.variables['v'][:, :, psi_lat, psi_lon], dtype=np.float64),
        # 'vbar': np.array(RomsNC.variables['vbar'][:, psi_lat, psi_lon], dtype=np.float64),
    }

    return ROMS_CV
def ROMS_CV_AddVar(RomsFile, ROMS_CV, VarName, IndBounds):
    """Add a variable to the ROMS control volume dictionary."""
    RomsNC = dt(RomsFile, 'r')

    # update dictionary with new variable
    ROMS_CV[VarName] = np.array(RomsNC.variables[VarName][:, :,
                                IndBounds['Rho']['lat_li']:IndBounds['Rho']['lat_ui'],
                                IndBounds['Rho']['lon_li']:IndBounds['Rho']['lon_ui']],
                                dtype=np.float64)

    return ROMS_CV
def RhoPsiIndex(RomsFile, latbounds, lonbounds):
    """Locates indices of lat and lon bounds within a ROMS output file, assuming regular spacing."""
    # load nc file
    RomsNC = dt(RomsFile, 'r')

    lat_rho = np.array(RomsNC.variables['lat_rho'][:, 0])
    lat_psi = np.array(RomsNC.variables['lat_psi'][:, 0])
    lon_rho = np.array(RomsNC.variables['lon_rho'][0, :])
    lon_psi = np.array(RomsNC.variables['lon_psi'][0, :])

    # check if lat bounds are increasing
    if latbounds[0] < latbounds[1]:
        # locate lat rho points within bounds
        RhoInd = {'lat_li': np.argmin(np.abs(lat_rho - latbounds[0])),
                  'lat_ui': np.argmin(np.abs(lat_rho - latbounds[1]))}
        # locate lat psi points within bounds; add one to psi upper bound
        PsiInd = {'lat_li': np.argmin(np.abs(lat_psi - latbounds[0])),
                  'lat_ui': np.argmin(np.abs(lat_psi - latbounds[1])) + 1}
    else:
        RhoInd = {'lat_li': np.argmin(np.abs(lat_rho - latbounds[1])),
                  'lat_ui': np.argmin(np.abs(lat_rho - latbounds[0]))}
        PsiInd = {'lat_li': np.argmin(np.abs(lat_psi - latbounds[1])),
                  'lat_ui': np.argmin(np.abs(lat_psi - latbounds[0])) + 1}

    # check if lon bounds are increasing
    if lonbounds[0] < lonbounds[1]:
        # locate lon rho & psi points; add 1 to psi points to get last grid cell
        RhoInd['lon_li'] = np.argmin(np.abs(lon_rho - lonbounds[0]))
        RhoInd['lon_ui'] = np.argmin(np.abs(lon_rho - lonbounds[1]))
        PsiInd['lon_li'] = np.argmin(np.abs(lon_psi - lonbounds[0]))
        PsiInd['lon_ui'] = np.argmin(np.abs(lon_psi - lonbounds[1])) + 1
    else:
        RhoInd['lon_li'] = np.argmin(np.abs(lon_rho - lonbounds[1]))
        RhoInd['lon_ui'] = np.argmin(np.abs(lon_rho - lonbounds[0]))
        PsiInd['lon_li'] = np.argmin(np.abs(lon_psi - lonbounds[1]))
        PsiInd['lon_ui'] = np.argmin(np.abs(lon_psi - lonbounds[0])) + 1

    IndBounds = {'Rho': RhoInd, 'Psi': PsiInd}
    return IndBounds
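# A hedged usage sketch of the loaders above: RhoPsiIndex builds the index
# dictionary that ROMS_CV_Load and ROMS_CV_AddVar consume. The file path, the
# lat/lon box, and the variable names 'temp' and 'salt' are illustrative only.
RomsFile = '/path/to/roms_his.nc'  # hypothetical ROMS history file
IndBounds = RhoPsiIndex(RomsFile, [35.0, 37.0], [-123.0, -121.0])
CV = ROMS_CV_Load(RomsFile, 'temp', IndBounds)
CV = ROMS_CV_AddVar(RomsFile, CV, 'salt', IndBounds)
print(CV['temp'].shape, CV['u'].shape)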
def write_CAETE_output(nc_filename, arr, var):
    t, la, lo = arr.shape

    # create netcdf file
    rootgrp = dt(nc_filename, mode='w', format='NETCDF4')

    # dimensions
    rootgrp.createDimension("time", None)
    rootgrp.createDimension("latitude", la)
    rootgrp.createDimension("longitude", lo)

    # variables
    time = rootgrp.createVariable(varname="time", datatype=np.float32, dimensions=("time",))
    latitude = rootgrp.createVariable(varname="latitude", datatype=np.float32, dimensions=("latitude",))
    longitude = rootgrp.createVariable(varname="longitude", datatype=np.float32, dimensions=("longitude",))
    var_ = rootgrp.createVariable(varname='annual_cycle_mean_of_' + str(flt_attrs()[var][2]),
                                  datatype=np.float32,
                                  dimensions=("time", "latitude", "longitude",),
                                  fill_value=NO_DATA[0])

    # attributes
    ## rootgrp
    rootgrp.description = flt_attrs()[var][0] + " from CAETE_1981-2010--> annual cycle"
    rootgrp.source = "CAETE model outputs"
    ## time
    time.units = "days since 1850-01-01 00:00:00.0"
    time.calendar = "noleap"
    time.axis = 'T'
    ## lat
    latitude.units = u"degrees_north"
    latitude.long_name = u"latitude"
    latitude.standard_name = u"latitude"
    latitude.axis = u'Y'
    ## lon
    longitude.units = "degrees_east"
    longitude.long_name = "longitude"
    longitude.standard_name = "longitude"
    longitude.axis = 'X'
    ## var
    var_.long_name = flt_attrs()[var][0]
    var_.units = flt_attrs()[var][1]
    var_.standard_name = flt_attrs()[var][2]
    var_.missing_value = NO_DATA[0]

    ## WRITING DATA
    times_fill = np.array([15.5, 45., 74.5, 105., 135.5, 166., 196.5,
                           227.5, 258., 288.5, 319., 349.5])
    time[:] = times_fill
    longitude[:] = np.arange(-179.75, 180, 0.5)
    latitude[:] = np.arange(-89.75, 90, 0.5)
    var_[:, :, :] = np.fliplr(np.ma.masked_array(arr, lsmk))

    rootgrp.close()
def x_grad_GridCor00(RomsFile, RomsGrd, varname):
    """Compute gradient in x (lon) direction."""
    # load roms file
    RomsNC = dt(RomsFile, 'r')

    # load variable and compute differentials
    var = RomsNC.variables[varname][:]
    dvar_x = np.diff(var, n=1, axis=3)
    dvar_z = np.diff(var, n=1, axis=1)

    # compute depth at rho points
    depth = dep._set_depth_T(RomsFile, None, 'rho', RomsNC.variables['h'], RomsNC.variables['zeta'])

    # distance between rho points in x and y directions
    x_dist = dist(RomsGrd)[0]

    # repeat over depth and time space
    _DX = np.repeat(np.array(x_dist)[np.newaxis, :, :], depth.shape[1], axis=0)
    dx = np.repeat(np.array(_DX)[np.newaxis, :, :, :], depth.shape[0], axis=0)

    # depth difference between adjacent rho points
    dz_x = np.diff(depth, n=1, axis=3)

    # vertical derivative
    dz_z = np.diff(depth, n=1, axis=1)
    dp_dz0 = dvar_z / dz_z
    dp_dz = 0.5 * (dp_dz0[:, :, :, 0:dp_dz0.shape[3] - 1] + dp_dz0[:, :, :, 1:dp_dz0.shape[3]])

    # distance between adjacent rho points
    dl = np.sqrt(dx * dx + dz_x * dz_x)

    # correction for roms grid
    dp_dl = dvar_x / dl * dl / dx

    # gradient
    dp_dx = dvar_x / dx

    # diagnostic ratios between corrected and uncorrected gradients
    rat = np.abs(dp_dl[0, :, :, :]) / np.abs(dp_dx[0, :, :, :])
    grat = dl[0, :, :, :] / dx[0, :, :, :]
    stat = np.array(rat).flatten()
def ModelDepth(RomsFile, point_type, IndBounds):
    """Computes ROMS depth within a control volume defined by lat and lon bounds.
    Uses obs_depth, converted from set_depth.m."""
    # load nc file
    RomsNC = dt(RomsFile, 'r')

    # ROMS variables
    romsvars = {'h': RomsNC.variables['h'],
                'zeta': RomsNC.variables['zeta'],
                'N': RomsNC.variables['Cs_r'].size}

    # compute depth
    depth_domain = dep._set_depth(RomsFile, None, point_type, romsvars['h'], romsvars['zeta'])

    # subset at control volume
    depth = np.array(depth_domain[:, IndBounds['Rho']['lat_li']:IndBounds['Rho']['lat_ui'],
                                  IndBounds['Rho']['lon_li']:IndBounds['Rho']['lon_ui']])

    return depth
def GetMonthArray(file_name, min_lat, max_lat, min_lon, max_lon, var='V_GRD_L100'):
    """
    This function reads one monthly file and returns the subset array for the
    requested domain, so the monthly arrays can be stacked up later.

    SIDENOTE: anyone hiring will really like a docstring like this for any
        functions on your github.
    SIDENOTE2: Notice there are no numbers hard coded into this function. That
        way you can call it from anywhere with any changes you want to make,
        like increasing the latitude.
    SIDENOTE3: This is a good way to open a dataset like this because after
        this function runs, the dataset is not left in memory.

    Parameters
    ----------
    file_name : str
        Path to the netCDF file for the month of interest
    min_lat, max_lat, min_lon, max_lon : int
        The min and max latitude and longitude of interest, in grid indices
    var : str
        The netCDF4 variable name that you want to extract
        * Optional, it will use 'V_GRD_L100' if nothing is given

    Returns
    -------
    arr : numpy array
        3-dimensional numpy array of the form
        [4 * days in month, max_lat - min_lat, max_lon - min_lon];
        the example for June is [120, 17, 11]
    """
    full_data = dt(file_name, 'r')
    chopped_data = np.array(full_data.variables[var][:, min_lat:max_lat, min_lon:max_lon])
    full_data.close()
    return chopped_data
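# A hedged usage sketch of GetMonthArray: stack every June file into one 4-D
# array. The glob pattern and the index ranges (150:167, 670:681) are
# assumptions borrowed from the hard-coded loops later in this collection.
import numpy as np
from glob import glob

june_files = sorted(glob('*06.grb2.nc'))
JUNE_ALL = np.zeros([120, 17, 11, len(june_files)])
for i, fname in enumerate(june_files):
    JUNE_ALL[:, :, :, i] = GetMonthArray(fname, 150, 167, 670, 681)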
def get_var(self, var):
    if isinstance(var, str) and (self.files is not None) and (len(self.files) > 0):
        fname = [filename for filename in self.files if var == filename.split('_')[0]]
    else:
        self.NotWork = True
        return None

    try:
        fname_comp = self.files_dir + os.sep + fname[0]
    except IndexError:
        print('variable --> %s is not in the directory --> %s' % (var, self.files_dir))
        self.NotWork = True
        return None

    try:
        dataset = dt(fname_comp, 'r')
    except IOError:
        print('Cannot open %s file' % var)
        self.NotWork = True
        return None
    else:
        # data, time, units & etc.
        calendar = dataset.variables['time'].calendar
        time_units = dataset.variables['time'].units
        time_arr = dataset.variables['time'][:]
        data_units = dataset.variables[var].units
        var_longname = dataset.variables[var].long_name
        self.metadata[var] = (calendar, time_units, time_arr, data_units, var_longname)
        dados = dataset.variables[var][:, :, :]
        dataset.close()
        return np.fliplr(np.array(dados))
def rho_dist(RomsFile):
    """Use the seawater package to compute distances between rho points."""
    RomsNC = dt(RomsFile, 'r')
    lat = RomsNC.variables['lat_rho'][:]
    lon = RomsNC.variables['lon_rho'][:]

    # distance between adjacent rho points in the x (lon) direction
    x_dist = np.empty((lon.shape[0], lon.shape[1] - 1))
    x_dist.fill(np.nan)
    for i in range(lon.shape[0]):
        for j in range(lon.shape[1] - 1):
            x_dist[i, j] = sw.dist([lat[i, j], lat[i, j + 1]],
                                   [lon[i, j], lon[i, j + 1]])[0]

    # distance between adjacent rho points in the y (lat) direction
    y_dist = np.empty((lat.shape[0] - 1, lat.shape[1]))
    y_dist.fill(np.nan)
    for i in range(y_dist.shape[0]):
        for j in range(y_dist.shape[1]):
            y_dist[i, j] = sw.dist([lat[i, j], lat[i + 1, j]],
                                   [lon[i, j], lon[i + 1, j]])[0]

    return x_dist, y_dist
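# For context, a minimal sketch of the return convention assumed above: sw.dist
# in the python-seawater package (assumed to be what `sw` refers to) returns a
# pair (distances, phase angles), which is why element [0] is taken in rho_dist.
import seawater as sw

dists, angles = sw.dist([36.0, 36.5], [-122.0, -122.0], units='km')
print(dists[0])   # distance (km) between the two points
print(angles[0])  # bearing of the segment, in degrees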
import matplotlib
import os
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib import colors as c
from netCDF4 import Dataset as dt, num2date
from mpl_toolkits.basemap import Basemap, cm, shiftgrid
import statsmodels.tsa.api as sm
import scipy

############################################################
# import variable data (SH lats only) from /LENSoutput directory
# data begins December of year 1 for easier data management
# time dimension may be restricted to 10-years
# to speed up analysis processes at first
ICEFRAC_file = dt(
    'b.e11.B1850C5CN.f09_g16.005.cam.h1.ICEFRAC.04020101-04991231.nc')
FSNS_file = dt('b.e11.B1850C5CN.f09_g16.005.cam.h1.FSNS.04020101-04991231.nc')

ICEFRAC = ICEFRAC_file.variables['ICEFRAC'][333:3983, 0:60, :]
FSNS = FSNS_file.variables['FSNS'][333:3983, 0:60, :]
# convert ICEFRAC from decimal to %
ICEFRAC = ICEFRAC * 100

# import time data to aid in seasonal selection
time = ICEFRAC_file.variables['time'][333:3983]
# set day 1 to 0 instead of "days since year 256" (see header file)
time = time - 42674

# import LAT and LON data from one variable for map generation
lons = ICEFRAC_file.variables['lon'][:]
lats = ICEFRAC_file.variables['lat'][0:60]
import numpy as np
from matplotlib import pyplot as plt
from netCDF4 import Dataset as dt

# LOAD DATA FROM NETCDF AS PARENT DATASET
dataset = dt('your_netcdf_file.nc', 'r')
lon = np.array(dataset['longitude'])
lat = np.array(dataset['latitude'])
zvar = np.array(dataset['skt'])

# DEFINE YOUR DOMAIN
lat1 = 14
lat2 = 15
lon1 = 75
lon2 = 76

if lat1 > lat2 or lat1 < lat.min() or lat2 > lat.max():
    print("INVALID DOMAIN: please check the latitude bounds")

# latitude indices (the index arithmetic assumes the latitude axis is stored
# in descending order, so the lower bound maps to the larger index)
j1 = np.where(lat >= lat1)[0][-1] + 1
j2 = np.where(lat >= lat2)[0][-1] - 1
lat_new = lat[j2:j1 + 1]

# longitude indices
i1 = np.where(lon >= lon1)[0][0] - 1
i2 = np.where(lon >= lon2)[0][0] + 1
lon_new = lon[i1:i2 + 1]

# subset the variable with the matching latitude and longitude indices
zvar_new = zvar[:, j2:j1 + 1, i1:i2 + 1]
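# A minimal alternative sketch for the same domain selection using boolean
# masks instead of index arithmetic. Note it does not add the one-cell padding
# on each side that the +1/-1 adjustments above provide.
lat_mask = (lat >= lat1) & (lat <= lat2)
lon_mask = (lon >= lon1) & (lon <= lon2)
lat_sub = lat[lat_mask]
lon_sub = lon[lon_mask]
zvar_sub = zvar[:, lat_mask, :][:, :, lon_mask]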
import matplotlib
import numpy as np
import time
import scipy.io as sio
from subprocess import call
from netCDF4 import Dataset as dt

# lat1 = np.linspace(-90,90,37)
# lat2 = np.linspace(-87.5,87.5,36)
# lon1 = np.linspace(-180,180,73)
# lon2 = np.linspace(-177.5,177.5,72)
# top1 = np.linspace(1,53,53)
# top2 = np.linspace(1.5,52.5,52)

filestr = "ncfile2.nc"
ncfile = dt(filestr, "r")
# ZNU = np.array(ncfile.variables['ZNU'][:],order='F')
lat = np.array(ncfile.variables["latitude"][:], dtype=np.float32)
lon = np.array(ncfile.variables["longitude"][:], dtype=np.float32)
t = np.array(ncfile.variables["time"][:], dtype=np.int32)
u = np.array(ncfile.variables["u10"][:], dtype=np.float32)
v = np.array(ncfile.variables["v10"][:], dtype=np.float32)
T = np.array(ncfile.variables["t2m"][:], dtype=np.float32)
al = np.array(ncfile.variables["al"][:], dtype=np.float32)

# us = np.linspace(-20, 20, 11, endpoint=True)
# plt.contourf(lon,lat,u[5][:][:],us,cmap='seismic')
# plt.contourf(lat2,top2,x.T,40,linewidth=3,cmap='gist_heat_r',vmin=0,vmax=0.0035)
# Use seismic colour map as well
# plt.xlabel('Longitude', fontsize=10)
# plt.ylabel('Latitude', fontsize=10)
def write_CAETE_output(nc_filename, arr, var, pls_mode=False):
    if var in flt_attrs().keys():
        ldim = flt_attrs()[var][3]
    else:
        ldim = 1

    if ldim > 1:
        t, la, lo = arr.shape
        one_layer = False
    else:
        la, lo = arr.shape
        one_layer = True

    lsmk_internal = mask_gen(ldim)

    # create netcdf file
    rootgrp = dt(nc_filename, mode='w', format='NETCDF3_CLASSIC')

    # dimensions & variables
    rootgrp.createDimension("latitude", la)
    rootgrp.createDimension("longitude", lo)

    if var in monthly_out:
        rootgrp.createDimension("time", None)
        time = rootgrp.createVariable(varname="time", datatype=np.float32, dimensions=("time",))
    elif var in npls_out:
        rootgrp.createDimension("pls", npls)
        pls = rootgrp.createVariable(varname="pls", datatype=np.int32, dimensions=("pls",))

    latitude = rootgrp.createVariable(varname="latitude", datatype=np.float32, dimensions=("latitude",))
    longitude = rootgrp.createVariable(varname="longitude", datatype=np.float32, dimensions=("longitude",))

    if var in monthly_out:
        var_ = rootgrp.createVariable(varname='annual_cycle_mean_of_' + str(flt_attrs()[var][2]),
                                      datatype=np.float32,
                                      dimensions=("time", "latitude", "longitude",),
                                      fill_value=NO_DATA[0])
    elif var in npls_out:
        var_ = rootgrp.createVariable(varname=str(flt_attrs()[var][2]),
                                      datatype=np.float32,
                                      dimensions=("pls", "latitude", "longitude",),
                                      fill_value=NO_DATA[0])
    elif one_layer:
        var_ = rootgrp.createVariable(varname=str(flt_attrs()[var][2]),
                                      datatype=np.float32,
                                      dimensions=("latitude", "longitude",),
                                      fill_value=NO_DATA[0])

    # attributes
    ## rootgrp
    rootgrp.description = flt_attrs()[var][0] + " caete-v1.0 OUTPUT"
    rootgrp.source = "CAETE model outputs"
    ## time
    if var in monthly_out:
        time.units = "days since 1850-01-01 00:00:00.0"
        time.calendar = "noleap"
        time.axis = 'T'
    if var in npls_out:
        pls.units = '1'
        pls.axis = u'T'
    ## lat
    latitude.units = u"degrees_north"
    latitude.long_name = u"latitude"
    latitude.standard_name = u"latitude"
    latitude.axis = u'Y'
    ## lon
    longitude.units = "degrees_east"
    longitude.long_name = "longitude"
    longitude.standard_name = "longitude"
    longitude.axis = u'X'
    ## var
    var_.long_name = flt_attrs()[var][0]
    var_.units = flt_attrs()[var][1]
    var_.standard_name = flt_attrs()[var][2]
    var_.missing_value = NO_DATA[0]

    ## WRITING DATA
    if var in monthly_out:
        time[:] = np.array([15.5, 45., 74.5, 105., 135.5, 166., 196.5,
                            227.5, 258., 288.5, 319., 349.5])
    if var in npls_out:
        pls[:] = np.arange(1, npls + 1)
    longitude[:] = np.arange(-179.75, 180, 0.5)
    latitude[:] = np.arange(-89.75, 90, 0.5)
    if not one_layer:
        var_[:, :, :] = np.fliplr(np.ma.masked_array(arr, lsmk_internal))
    else:
        var_[:, :] = np.flipud(np.ma.masked_array(arr, lsmk_internal))

    rootgrp.close()
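# The CAETE writers above index flt_attrs()[var] as
# (long_name, units, standard_name, number_of_layers) and rely on module-level
# NO_DATA, monthly_out, npls_out, npls and a land-sea mask. A hypothetical
# sketch of what those module-level names and a call could look like; the
# variable name 'photo', its attributes, and the array shapes are illustrative,
# not taken from the model code.
import numpy as np

NO_DATA = [-9999.0, -9999.0]
npls = 100
monthly_out = ['photo']   # variables written with a 12-month time axis
npls_out = []             # variables written with a PLS axis


def flt_attrs():
    # var -> (long_name, units, standard_name, number_of_layers)
    return {'photo': ('gross primary productivity', 'kg m-2 year-1', 'gpp', 12)}


def mask_gen(nlayers):
    # all-False mask for this sketch; the real model uses its land-sea mask
    return np.zeros((nlayers, 360, 720), dtype=bool)


# a fake 12-month, 0.5-degree global field written with the function above
fake = np.random.random((12, 360, 720)).astype(np.float32)
write_CAETE_output('gpp_annual_cycle.nc', fake, 'photo')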
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from netCDF4 import Dataset as dt

#%%
June = glob('*06.grb2.nc')
July = glob('*07.grb2.nc')
Aug = glob('*08.grb2.nc')
Sep = glob('*09.grb2.nc')

JUNE_ALL = np.zeros([120, 17, 11, 27])
JULY_ALL = np.zeros([124, 17, 11, 27])
AUG_ALL = np.zeros([124, 17, 11, 27])
SEP_ALL = np.zeros([120, 17, 11, 27])

#%%
for i in range(len(June)):
    dummy = June[i]
    dummy = dt(dummy, 'r')
    dummy = np.array(dummy.variables['V_GRD_L100'][:, 150:167, 670:681])
    JUNE_ALL[:, :, :, i] = dummy

for i in range(len(July)):
    dummy = July[i]
    dummy = dt(dummy, 'r')
    dummy = np.array(dummy.variables['V_GRD_L100'][:, 150:167, 670:681])
    JULY_ALL[:, :, :, i] = dummy

for i in range(len(Aug)):
    dummy = Aug[i]
    dummy = dt(dummy, 'r')
    dummy = np.array(dummy.variables['V_GRD_L100'][:, 150:167, 670:681])
    AUG_ALL[:, :, :, i] = dummy

for i in range(len(Sep)):
    dummy = Sep[i]
    dummy = dt(dummy, 'r')
    dummy = np.array(dummy.variables['V_GRD_L100'][:, 150:167, 670:681])
    SEP_ALL[:, :, :, i] = dummy
import sys
from netCDF4 import Dataset as dt

# print 'Argument List:', str(sys.argv)
Args = sys.argv
# print Args

p1 = [0.20943952, 6.96069e+06, 1.25266]  # lon rad lat
p2 = [3.66519, 8.01733e+06, 0.338019]
file = '../ModelRunGeo-2019-019-221500.nc'
# file = Args[1]; p1 = [float(Args[2]), float(Args[3]), float(Args[4])]
# p2 = [float(Args[5]), float(Args[6]), float(Args[7])];
# print p1, p2

ncfile = dt(file, 'r')
# ED = np.array(ncfile.variables['Electron_Density'][:], dtype=np.float32);
ED = ncfile.variables['Electron_Density'][:][0]
Geo_Lon = ncfile.variables['Geo_Lon'][:][0]
Geo_Radius = ncfile.variables['Geo_Radius'][:][0]
Geo_Lat = ncfile.variables['Geo_Lat'][:][0]

P1 = []
P2 = []


def appendCoord(array, length):
    # computation in array coordinates
    for i in range(0, len(Geo_Lon) - 1):
        if Geo_Lon[i][0][0] <= p1[0] and p1[0] <= Geo_Lon[i + 1][0][0]:
            break
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 16 22:49:45 2016

@author: kushal
"""

from netCDF4 import Dataset as dt  # note the change of case in netCDF4
import numpy as np

# Dataset is a netcdf object, similar to a dictionary
filestr = 'data/ncfile2.nc'   # This is the file name
ncfile = dt(filestr, 'r')     # Create a netcdf object in read mode
# print ncfile.variables      # To print all the variables

lat = np.array(ncfile.variables['latitude'][:], dtype=np.float32)
'''
Extract a variable named 'latitude' from the netcdf object as a numpy array,
having all its entries as real numbers of type float32 (32 bit real numbers).
The numpy array's name is lat
'''
lon = np.array(ncfile.variables['longitude'][:], dtype=np.float32)
t = np.array(ncfile.variables['time'][:], dtype=np.int32)
u = np.array(ncfile.variables['u10'][:], dtype=np.float32)
v = np.array(ncfile.variables['v10'][:], dtype=np.float32)
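# A hedged follow-on sketch: the raw `time` integers above are offsets from a
# reference date. If the variable carries the usual `units` (and optionally
# `calendar`) attributes, which is an assumption about this particular file,
# netCDF4 can decode them to datetime objects directly.
from netCDF4 import num2date

time_var = ncfile.variables['time']
dates = num2date(time_var[:], units=time_var.units,
                 calendar=getattr(time_var, 'calendar', 'standard'))
print(dates[0], dates[-1])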
def write_snap_output(arr, var, flt_attrs, time_index,
                      experiment="TEST RUN HISTORICAL ISIMIP"):
    NO_DATA = [-9999.0, -9999.0]
    time_units = TIME_UNITS
    calendar = CALENDAR
    nc_out = Path("../nc_outputs")
    time_dim = time_index
    longitude_0 = np.arange(-179.75, 180, 0.5)[201:272]
    latitude_0 = np.arange(89.75, -90, -0.5)[160:221]

    print("\nSaving netCDF4 files")
    print_progress(0, len(var), prefix='Progress:', suffix='Complete')
    for i, v in enumerate(var):
        nc_filename = os.path.join(nc_out, Path(f'{v}.nc4'))
        with dt(nc_filename, mode='w', format='NETCDF4') as rootgrp:
            # dimensions & variables
            rootgrp.createDimension("latitude", latitude_0.size)
            rootgrp.createDimension("longitude", longitude_0.size)
            rootgrp.createDimension("time", None)

            time = rootgrp.createVariable(varname="time", datatype=np.int32, dimensions=("time",))
            latitude = rootgrp.createVariable(varname="latitude", datatype=np.float32, dimensions=("latitude",))
            longitude = rootgrp.createVariable(varname="longitude", datatype=np.float32, dimensions=("longitude",))
            var_ = rootgrp.createVariable(varname=flt_attrs[v][2],
                                          datatype=np.float32,
                                          dimensions=("time", "latitude", "longitude",),
                                          zlib=True,
                                          fill_value=NO_DATA[0],
                                          fletcher32=True)

            # attributes
            # rootgrp
            rootgrp.description = flt_attrs[v][0] + " from CAETÊ-CNP OUTPUT"
            rootgrp.source = "CAETE model outputs - [email protected]"
            rootgrp.experiment = experiment
            # time
            time.units = time_units
            time.calendar = calendar
            time.axis = 'T'
            # lat
            latitude.units = u"degrees_north"
            latitude.long_name = u"latitude"
            latitude.standard_name = u"latitude"
            latitude.axis = u'Y'
            # lon
            longitude.units = "degrees_east"
            longitude.long_name = "longitude"
            longitude.standard_name = "longitude"
            longitude.axis = u'X'
            # var
            var_.long_name = flt_attrs[v][0]
            var_.units = flt_attrs[v][1]
            var_.standard_name = flt_attrs[v][2]
            var_.missing_value = NO_DATA[0]

            # WRITING DATA
            longitude[:] = longitude_0
            latitude[:] = latitude_0
            time[:] = time_dim
            var_[:, :, :] = np.ma.masked_array(arr[i], mask=arr[i] == NO_DATA[0])
        print_progress(i + 1, len(var), prefix='Progress:', suffix='Complete')
from netCDF4 import Dataset as dt
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

test_file = 'data/atmPrf_C001.2010.001.00.00.G20_2013.3520.nc'
test_ncfile = dt(test_file, 'r')

test_lat = np.array(test_ncfile.variables['Lat'][:])
test_lon = np.array(test_ncfile.variables['Lon'][:])
test_pres = np.array(test_ncfile.variables['Pres'][:])
test_temp = np.array(test_ncfile.variables['Temp'][:])

test_s = (len(test_lat), 4)
test_data = np.zeros(test_s)
for i in range(len(test_lat)):
    test_data[i][0] = test_pres[i]
    test_data[i][1] = test_lat[i]
    test_data[i][2] = test_lon[i]
    test_data[i][3] = test_temp[i]

train_file = 'ta_6hrPlev_CMAM-Ext_CMAM30-SD_r1i1p1_2010010100-2010063018.nc'
train_ncfile = dt(train_file, 'r')

train_time = np.array(train_ncfile.variables['time'][:])  # len 724
train_plev = np.array(train_ncfile.variables['plev'][:])  # len 87
train_lat = np.array(train_ncfile.variables['lat'][:])    # len 32
train_lon = np.array(train_ncfile.variables['lon'][:])    # len 64
train_temp = np.array(train_ncfile.variables['ta'])
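# A minimal equivalent to the element-by-element fill loop above: stack the
# four profile columns in one call (same column order: Pres, Lat, Lon, Temp).
test_data_alt = np.column_stack((test_pres, test_lat, test_lon, test_temp))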
def write_area_output(arr, time_index, experiment="TEST RUN HISTORICAL ISIMIP"):
    NO_DATA = [-9999.0, -9999.0]
    time_units = TIME_UNITS
    calendar = CALENDAR
    nc_out = Path("../nc_outputs")
    time_dim = time_index
    longitude_0 = np.arange(-179.75, 180, 0.5)[201:272]
    latitude_0 = np.arange(89.75, -90, -0.5)[160:221]

    nc_filename = os.path.join(nc_out, Path('ocp_area.nc4'))
    with dt(nc_filename, mode='w', format='NETCDF4') as rootgrp:
        # dimensions & variables
        rootgrp.createDimension("latitude", latitude_0.size)
        rootgrp.createDimension("longitude", longitude_0.size)
        rootgrp.createDimension("pls", arr.shape[1])
        rootgrp.createDimension("time", None)

        time = rootgrp.createVariable(varname="time", datatype=np.int32, dimensions=("time",))
        pls = rootgrp.createVariable(varname="PLS", datatype=np.int16, dimensions=("pls",))
        latitude = rootgrp.createVariable(varname="latitude", datatype=np.float32, dimensions=("latitude",))
        longitude = rootgrp.createVariable(varname="longitude", datatype=np.float32, dimensions=("longitude",))
        var_ = rootgrp.createVariable(varname='ocp_area',
                                      datatype=np.float32,
                                      dimensions=("time", "pls", "latitude", "longitude",),
                                      fill_value=NO_DATA[0])

        # attributes
        # rootgrp
        rootgrp.description = ("Occupation coefficients of Plant Life Strategies"
                               " from CAETÊ-CNP OUTPUT")
        rootgrp.source = "CAETE model outputs"
        rootgrp.experiment = experiment
        # time
        time.units = time_units
        time.calendar = calendar
        time.axis = u'T'
        # pls
        pls.units = u'unitless'
        pls.axis = u'P'
        # lat
        latitude.units = u"degrees_north"
        latitude.long_name = u"latitude"
        latitude.standard_name = u"latitude"
        latitude.axis = u'Y'
        # lon
        longitude.units = "degrees_east"
        longitude.long_name = "longitude"
        longitude.standard_name = "longitude"
        longitude.axis = u'X'
        # var
        var_.long_name = "Occupation coefficients of Plant Life Strategies (Abundance data)"
        var_.units = "unitless"
        var_.standard_name = 'ocp_area'
        var_.missing_value = NO_DATA[0]

        # WRITING DATA
        pls[:] = np.arange(arr.shape[1], dtype=np.int16)
        longitude[:] = longitude_0
        latitude[:] = latitude_0
        time[:] = time_dim
        var_[:, :, :, :] = np.ma.masked_array(arr, mask=arr == NO_DATA[0])
# Program to visualize the data
import numpy as np
from netCDF4 import Dataset as dt
import matplotlib.pyplot as plt
import matplotlib
from mpl_toolkits.mplot3d import Axes3D
from scipy.interpolate import griddata

fname = 'datas_small_rotation.nc'
fp = dt(fname, 'r')
# print(fp.variables)

x = np.array(fp.variables['X Distance'][:])
y = np.array(fp.variables['Y Distance'][:])
t = np.array(fp.variables['T Time'][:])
h = np.array(fp.variables['H_Level'][:])
u = np.array(fp.variables['U_Velocity'][:])
v = np.array(fp.variables['V_Velocity'][:])
uv = (u**2.0 + v**2.0)**0.5

# regridding data for smoother values
X, Y = np.meshgrid(x, y)
# X, Y = np.mgrid[0:10:1001j, 0:10:1001j]
# H = np.zeros((10001,1001,1001))
'''
for i in range(0, 10001):
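# A hedged sketch of the regridding hinted at above using scipy's griddata:
# interpolate the first H_Level snapshot from the model grid onto a finer
# regular grid. The 1001x1001 target resolution is illustrative, and h[0] is
# assumed to be laid out as (y, x) so it matches np.meshgrid(x, y).
Xf, Yf = np.meshgrid(np.linspace(x.min(), x.max(), 1001),
                     np.linspace(y.min(), y.max(), 1001))
points = np.column_stack((X.ravel(), Y.ravel()))
H_fine = griddata(points, h[0].ravel(), (Xf, Yf), method='cubic')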
"""
Created on Tue May 12 17:06:07 2020

@author: Jasen

Load World Ocean Atlas Data
"""
import numpy as np
from netCDF4 import Dataset as dt

# WOA data location
TempFile = '/Users/Jasen/Documents/Data/WOA_Data/AnalyzedMean/woa18_decav_t00_01.nc'
SaltFile = '/Users/Jasen/Documents/Data/WOA_Data/AnalyzedMean/woa18_decav_s00_01.nc'
NO3_File = '/Users/Jasen/Documents/Data/WOA_Data/AnalyzedMean/woa18_all_n00_01.nc'

# WOA data load
TempNC = dt(TempFile, 'r')
SaltNC = dt(SaltFile, 'r')
NO3_NC = dt(NO3_File, 'r')
# Extract location [NOTE: _NC.variables == ncdisp(), _NC.variables.keys == simple list]

# ROMS reference file
RomsFile = '/Users/Jasen/Documents/Data/ROMS_ICBC/wc12_hycom_20090101_dnref99_ini_Darwin_NuteMap.nc'
RomsNC = dt(RomsFile, 'r')
ROMSlat = np.array(RomsNC.variables['lat_rho'][:], dtype=np.float64)

# Select every point at 1 degree intervals on 0.5 degree locations
# Expand domain to include points outside of ROMS wc12 domain for interpolation
# locate indices of WOA lat within ROMS wc12 domain
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 17 08:23:36 2015

@author: arun
"""

import numpy as np
# import matplotlib.pyplot as plt
import scipy.io as sio
from netCDF4 import Dataset as dt

filestr = 'ncfile2.nc'
ncfile = dt(filestr, 'r')
# ZNU = np.array(ncfile.variables['ZNU'][:],order='F')
lat = np.array(ncfile.variables['latitude'][:], dtype=np.float32)
lon = np.array(ncfile.variables['longitude'][:], dtype=np.float32)
u = np.array(ncfile.variables['u10'][:], dtype=np.float32)
v = np.array(ncfile.variables['v10'][:], dtype=np.float32)
T = np.array(ncfile.variables['t2m'][:], dtype=np.float32)
'''
To save in mat files, first you need a dictionary having all the values as keys
Second, you need to specify a proper key values to them.
Finally, you need the sio.savemat() option to save them!
'''
mat = {}
mat['lat'] = lat
mat['lon'] = lon
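# A hedged completion sketch of the .mat export described in the comment above:
# add the remaining arrays to the dictionary and call sio.savemat(). The output
# file name 'ncfile2_vars.mat' is illustrative, not from the original script.
mat['u'] = u
mat['v'] = v
mat['T'] = T
sio.savemat('ncfile2_vars.mat', mat)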
# lat_ts is the latitude of true scale.
# resolution = 'c' means use crude resolution coastlines.
m = Basemap(projection='merc', llcrnrlat=-80, urcrnrlat=80,
            llcrnrlon=-180, urcrnrlon=180, lat_ts=20, resolution='c')
m.drawcoastlines()
m.fillcontinents(color='coral', lake_color='aqua')
# draw parallels and meridians.
m.drawparallels(np.arange(-90., 91., 30.))
m.drawmeridians(np.arange(-180., 181., 60.))
m.drawmapboundary(fill_color='aqua')
plt.title("Mercator Projection")
plt.show()

#%%
JUNE_79 = 'wnd700.gdas.197906.grb2.nc'
JUNE_79 = dt(JUNE_79, 'r')
# JUNE_79=np.array(JUNE_79.variables['V_GRD_L100'][:])

JUNE_80 = 'wnd700.gdas.198006.grb2.nc'
JUNE_80 = dt(JUNE_80, 'r')
# JUNE_80=np.array(JUNE_80.variables['V_GRD_L100'][:])

JUNE_81 = 'wnd700.gdas.198106.grb2.nc'
JUNE_81 = dt(JUNE_81, 'r')
# JUNE_81=np.array(JUNE_81.variables['V_GRD_L100'][:])

JUNE_82 = 'wnd700.gdas.198206.grb2.nc'
JUNE_82 = dt(JUNE_82, 'r')
# JUNE_82=np.array(JUNE_82.variables['V_GRD_L100'][:])

JUNE_83 = 'wnd700.gdas.198306.grb2.nc'
from netCDF4 import Dataset as dt, num2date
from mpl_toolkits.basemap import Basemap, cm, shiftgrid
import statsmodels.tsa.api as sm
import scipy

############################################################
# import VAR model variables of interest:
# FSNS (net surface shortwave radiation)
# FSNSC (net surface shortwave radiation: clear sky)
# FLNS (net surface longwave radiation)
# FLNSC (net surface longwave radiation: clear sky)
# ICEFRAC (ice fraction)
# TGCLDLWP (total grid cloud liquid water path)
# CLDTOT (vertically integrated cloud fraction)
FSNS_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h1.FSNS.15000101-15991231.nc')
FSNSC_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h1.FSNSC.15000101-15991231.nc')
FLNS_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h1.FLNS.15000101-15991231.nc')
FLNSC_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h1.FLNSC.15000101-15991231.nc')
ICEFRAC_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h1.ICEFRAC.15000101-15991231.nc')
TGCLDLWP_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h2.TGCLDLWP.1500010100Z-1510123118Z.nc')
CLDTOT_file = \
    dt('b.e11.B1850C5CN.f09_g16.005.cam.h2.CLDTOT.1500010100Z-1510123118Z.nc')

###########################################################
# for monthly climatologies, we trim time dimension to start at march 1st