def get_geocoords(infile, lat='lat', lon='lon'):
    """Read the latitude and longitude variables from a netCDF file.

    Parameters
    ----------
    infile : str
        Path to the netCDF file.
    lat : str
        Name of the latitude variable in the file.
    lon : str
        Name of the longitude variable in the file.

    Returns
    -------
    dict
        Mapping of variable name -> full data array read from the file.
    """
    df = EcoFOCI_netCDF(infile)
    nchandle = df._getnchandle_()
    data = {}
    # the original enumerate() index was never used; iterate the names directly
    for v in (lat, lon):
        data[v] = nchandle.variables[v][:]
    df.close()
    return data
def load(self):
    """Load ``self.plot_var`` from every file listed in ``self.files_path``.

    Populates ``self.data`` keyed by the file's nominal depth (from
    ``self.pointer_dic['nominal_depth']``) with the variable's first-cell
    timeseries and its EPIC-decoded time axis.  ``self.ncdata`` retains the
    raw dictionary from the *last* file read.  Files that do not contain
    ``self.plot_var`` are skipped.
    """
    self.data = {}
    for ind, ncfile in enumerate(self.files_path):
        # BUG FIX: the original wrote print("...").format(ncfile), chaining
        # .format onto print's return value; that only parsed under the Py2
        # print statement and raises AttributeError under Py3.
        print("Working on {0}".format(ncfile))
        df = EcoFOCI_netCDF(ncfile)
        df.get_global_atts()
        vars_dic = df.get_vars()
        if self.plot_var not in vars_dic:
            # BUG FIX: the original leaked the open handle on this path
            df.close()
            continue
        ncdata = df.ncreadfile_dic()
        df.close()
        # mask netCDF fill values (1e35) so they do not plot
        ncdata[self.plot_var][np.where(
            ncdata[self.plot_var] > 1e34)] = np.nan
        self.ncdata = ncdata  # only the last file read is kept here
        self.data[self.pointer_dic['nominal_depth'][ind]] = {
            'data': ncdata[self.plot_var][:, 0, 0, 0],
            'time': EPIC2Datetime(ncdata['time'], ncdata['time2'])
        }
def from_netcdf_1dsplice(infile, height_ind, lat_ind, lon_ind): """ Uses ncreadfile_dic which returns a dictionary of all data from netcdf""" ###nc readin/out df = EcoFOCI_netCDF(infile) nchandle = df._getnchandle_() params = df.get_vars() #gets all of them print "Parameters available: " #print params ncdata = ncreadfile_dic_slice(nchandle, params, height_ind=height_ind, lat_ind=lat_ind, lon_ind=lon_ind) df.close() return ncdata
parser = argparse.ArgumentParser( description='convert netcdf file to erddap formatted file') parser.add_argument('sourcefile', metavar='sourcefile', type=str, help='path to .nc files') parser.add_argument('add_dsg_idvar', metavar='add_dsg_idvar', type=str, help='name of dsg style id variable') args = parser.parse_args() "---" df = EcoFOCI_netCDF(args.sourcefile) global_atts = df.get_global_atts() vars_dic = df.get_vars() nchandle = df._getnchandle_() data = df.ncreadfile_dic() try: nchandle.createDimension('id_strlen', 5) nchandle.createVariable('profileid', 'S1', dimensions=('record_number', 'id_strlen')) nchandle.variables['profileid'].cf_role = args.add_dsg_idvar nchandle.variables['profileid'].long_name = 'profile_id' except: print "{0} - not added".format(args.add_dsg_idvar)
""" databounds = {} if args.multi: ### cycle through all files, retrieve data and plot print files_path writer = pd.ExcelWriter('data/' + MooringID + '_' + plot_var + '.xlsx', engine='xlsxwriter', datetime_format='YYYY-MM-DD HH:MM:SS') label_thin = [] for ind, ncfile in enumerate(files_path): print "Working on {activefile}".format(activefile=ncfile) #open/read netcdf files df = EcoFOCI_netCDF(ncfile) global_atts = df.get_global_atts() vars_dic = df.get_vars() ncdata = df.ncreadfile_dic() df.close() nctime = EPIC2Datetime(ncdata['time'], ncdata['time2']) #find and replace missing values with nans so they don't plot try: ncdata[plot_var][np.where(ncdata[plot_var] > 1e30)] = np.nan try: label_thin = label_thin + [label[ind]] except TypeError: label_thin = label_thin + ['']
parser = argparse.ArgumentParser(description='Trim NC files') parser.add_argument('-i', '--inputfiles', nargs='+', type=str, help='full path to each file seperated by a space') parser.add_argument('-o', '--outputfile', type=str, help='name of output file') parser.add_argument('-ek', '--EPIC_KEY', nargs='+', help='EPIC Keys to keep') args = parser.parse_args() data = {} for ncfile in args.inputfiles: print "Reading {file}".format(file=ncfile) ###nc readin/out df = EcoFOCI_netCDF(ncfile) global_atts = df.get_global_atts() vars_dic = df.get_vars() tempdata = df.ncreadfile_dic() for key in vars_dic.keys(): if not key in ['lat', 'lon', 'depth' ]: #non-increasing dimensions should be skipped try: data[key] = np.concatenate((data[key], tempdata[key])) except: data[key] = tempdata[key] else: #only one value should be kept for non-increasing dimensions data[key] = tempdata[key] #df.close()
'--temp_scale', nargs=2, type=float, help='fixed temperature scale (min max)') parser.add_argument( '-timebounds', '--timebounds', nargs=3, type=str, help='fixed timebounds start: yyyy-mm-dd end: yyyy-mm-dd "month"') args = parser.parse_args() print "Working on file %s " % args.DataPath nc = EcoFOCI_netCDF(args.DataPath) ncdata = nc.ncreadfile_dic() g_atts = nc.get_global_atts() nc.close() cast_time = EPIC2Datetime(ncdata['time'], ncdata['time2']) doy = np.array([x.timetuple().tm_yday for x in cast_time]) p1 = TimeseriesPorpertyPropertyPlot() try: t1 = p1.add_title(mooringid=global_atts['MOORING'], lat=ncdata['lat'][0], lon=ncdata['lon'][0], depth=ncdata['depth'][0], instrument=args.instname) except KeyError:
plt.subplot2grid((3, 1), (1, 0), colspan=1, rowspan=3) ### set arbitrary max and min bounds to be changed later based on data bounds databounds['max_t'] = 0 databounds['min_t'] = 100000000 databounds['max_v'] = -50 databounds['min_v'] = 50 label_thin = [] ### cycle through all files, retrieve data and plot print files_path for ind, ncfile in enumerate(files_path): print "Working on {activefile}".format(activefile=ncfile) #open/read netcdf files df = EcoFOCI_netCDF(ncfile) global_atts = df.get_global_atts() vars_dic = df.get_vars() ncdata = df.ncreadfile_dic() df.close() if args.timeseries_overlay: nctime = EPIC2Datetime(ncdata['time'], ncdata['time2']) def set_year_even(x): if x.year % 2 == 0: return x.replace(year=2000) elif x.year % 2 == 1: return x.replace(year=2001) def set_year_odd(x):
Options: point, timeSeries, trajectory, profile, timeSeriesProfile, trajectoryProfile''' ) parser.add_argument("-o", '--out_config', action="store_true", help='output to config file') parser.add_argument("-in", '--in_config', action="store_true", help='modify using current config file') args = parser.parse_args() ###nc readin ncfile = args.sourcefile df = EcoFOCI_netCDF(ncfile) global_atts = df.get_global_atts() df.close() if args.screen: for k in global_atts.keys(): print "{0}: {1}".format(k, global_atts[k]) if args.out_config: for k in global_atts.keys(): global_atts[k] = str(global_atts[k]) print "{0}: {1}".format(k, global_atts[k]) ConfigParserLocal.write_config("header_config.yaml", global_atts, 'yaml')
this program using .yaml form """ if args.ifile.split('.')[-1] == 'nc': ifile = [args.ifile] elif args.ifile.split('.')[-1] == 'yaml': pointer_file = ConfigParserLocal.get_config_yaml(args.ifile) else: print "Data file or pointer file format not recognized" sys.exit() ############## # process optional time average string flag if args.filter: for ind_file in ifile: df = EcoFOCI_netCDF(ind_file) global_atts = df.get_global_atts() vars_dic = df.get_vars() data = df.ncreadfile_dic() df.close() if args.filter == 'F35': pass elif args.filter == 'F29': pass else: print "Choose a valid filter" ############## # averaging/resampling if args.tave and args.isCF: #use argument string to set up frequency
# Read two ALAMO-float profile netCDF files into dictionaries for later
# csv/plot output.
parser = argparse.ArgumentParser(description='Convert .nc to .csv screen output')
parser.add_argument('infile', metavar='infile', type=str, help='input file path')
parser.add_argument("-csv","--csv", action="store_true",
                    help='output non-epic formatted netcdf as csv')
parser.add_argument("-is_whoi","--is_whoi", action="store_true",
                    help='flag if is directly from WHOI')
parser.add_argument("-plots","--plots", action="store_true",
                    help='generate plots')
args = parser.parse_args()

###nc readin/out
# NOTE(review): the paths below are hard-coded and the parsed 'infile'
# argument is never used in this view — confirm whether these should come
# from args.infile
file1 = '/Volumes/WDC_internal/Users/bell/ecoraid/2016/Additional_FieldData/ArcticHeat/AlamoFloats/netcdf/arctic_heat_alamo_profiles_9058_9f75_d5e5_f5f9.nc'
df = EcoFOCI_netCDF(file1)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
dims = df.get_dims()
data0 = df.ncreadfile_dic()
df.close()

file2 = '/Volumes/WDC_internal/Users/bell/ecoraid/2016/Additional_FieldData/ArcticHeat/AlamoFloats/netcdf/arctic_heat_alamo_profiles_9115_bb97_cc7e_a9c0.nc'
df = EcoFOCI_netCDF(file2)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
dims = df.get_dims()
data1 = df.ncreadfile_dic()
# NOTE(review): the handle for file2 is not closed in this view — verify
# df.close() follows in code beyond this chunk

if args.is_whoi:
    # WHOI files carry CF time relative to 1950-01-01
    timestr = 'days since 1950-01-01T00:00:00Z'
action="store_true", help='output to screen') parser.add_argument("-o", '--out_config', action="store_true", help='output to config file') parser.add_argument("-in", '--in_config', action="store_true", help='modify using current config file') args = parser.parse_args() ###nc readin ncfile = args.sourcefile df = EcoFOCI_netCDF(ncfile) global_atts = df.get_global_atts() vars_dic = df.get_vars() if args.screen: for k in vars_dic.keys(): if k == args.varname: atts = df.get_vars_attributes(var_name=k, var_type='long_name') print "{0}: {1}".format(k, atts) if args.out_config: for k in vars_dic.keys(): if k == args.varname: atts = df.get_vars_attributes(var_name=k, var_type='long_name') print "{0}: {1}".format(k, atts)
help='instrument name') parser.add_argument("-rot", '--rotate', type=float, help='rotate vectors angle provided', default=0.0) parser.add_argument("-di", '--depth_index', type=int, default=0, help='0 indexed value for depth parameter to plot if 2d') args = parser.parse_args() #read in 1d data file df = EcoFOCI_netCDF(args.DataPath) global_atts = df.get_global_atts() vars_dic = df.get_vars() #check that variable is in data file and exit if not is_in_dic(args.epickey, vars_dic) ncdata = df.ncreadfile_dic() df.close() nctime = get_UDUNITS(EPIC2Datetime(ncdata['time'], ncdata['time2']), 'days since 0001-1-1') + 1.0 # filter data to convert 1e35 -> np.nan ncdata[args.epickey[0]][np.where( ncdata[args.epickey[0]][:, args.depth_index, 0, 0] >= 1e30), args.depth_index, 0, 0] = np.nan ncdata[args.epickey[1]][np.where( ncdata[args.epickey[1]][:, args.depth_index, 0, 0] >= 1e30),
parser.add_argument('plot_var', metavar='plot_var', type=str, help='EPIC Key to plot') parser.add_argument('depth_m', metavar='depth_m', type=int, help='local max depth') parser.add_argument('-fg', '--FillGaps', action="store_true", help='Interpolate and Fill Gaps in bin averaged data') args = parser.parse_args() print "Working on {0}".format(args.gridded_file) df = EcoFOCI_netCDF(args.gridded_file) vars_dic = df.get_vars() ncdata = df.ncreadfile_dic() df.close() depth_array = np.arange(0, args.depth_m + 1, 1) ###build empty array gridarray = np.ones((len(ncdata['time']), len(depth_array))) * np.nan for i, v in enumerate(ncdata['depth']): if v in ncdata['depth']: print "copying {0}".format(v) gridarray[:, int(v)] = ncdata[args.plot_var][:, i, 0, 0] extent = [ np.min(ncdata['time']),
def repl_var(nchandle, var_name, val=1e35):
    """Overwrite every element of *var_name* in the open netCDF handle with *val*.

    val is coerced with float(), so string values (e.g. from argparse) work.
    Writes in place; the caller is responsible for closing the handle.
    """
    nchandle.variables[var_name][:] = np.ones_like(
        nchandle.variables[var_name][:]) * float(val)
    return

"""------------------------------- MAIN--------------------------------------------"""

parser = argparse.ArgumentParser(
    description='Replace EPIC Variable with 1e35 for all depths')
parser.add_argument('sourcefile', metavar='sourcefile', type=str,
                    help='complete path to netcdf file')
parser.add_argument('user_var', metavar='user_var', type=str,
                    help='EPIC Key Code or variable name')
parser.add_argument('Value', metavar='Value', type=str,
                    help='replacement value')
args = parser.parse_args()

###nc readin
ncfile = args.sourcefile
df = EcoFOCI_netCDF(ncfile)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
data = df.ncreadfile_dic()
print ncfile.split('/')[-1]
# overwrite the requested variable in place, then close the file
repl_var(df._getnchandle_(), args.user_var, val=args.Value)
df.close()
# Convert longitudes stored as degrees-west to degrees-east, editing the
# netCDF file in place.  With -m360 the result is shifted into the 0-360 range.
parser = argparse.ArgumentParser(
    description='Convert DegreesWest to DegreesEast inplace')
parser.add_argument('sourcefile',
                    metavar='sourcefile',
                    type=str,
                    help='complete path to netcdf file')
parser.add_argument('-m360',
                    '--m360',
                    action="store_true",
                    help='make range 0-360')
args = parser.parse_args()

###nc readin
ncfile = args.sourcefile
df = EcoFOCI_netCDF(ncfile)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
data = df.ncreadfile_dic()

# NOTE(review): this writes through df.variables — confirm EcoFOCI_netCDF
# exposes the underlying netCDF variables attribute (other scripts in this
# project go via df._getnchandle_()).
if 'lon' in df.variables.keys():
    if args.m360:
        df.variables['lon'][:] = -1. * df.variables['lon'][:] + 360
    else:
        df.variables['lon'][:] = -1. * df.variables['lon'][:]
        # NOTE(review): units update happens only on the non-m360 path here;
        # confirm whether it was intended after the if/else for both cases
        vars_dic['lon'].units = 'degree_east'
elif 'longitude' in df.variables.keys():
    if args.m360:
        df.variables['longitude'][:] = -1. * df.variables['longitude'][:] + 360
    else:
        df.variables['longitude'][:] = -1. * df.variables['longitude'][:]
""" import datetime import numpy as np import gsw from io_utils.EcoFOCI_netCDF_read import EcoFOCI_netCDF #godas is a function of time, depth, lat, lon (12, 40, 418, 360) godas_ptemp = '/Volumes/WDC_internal/Users/bell/Data_Local/Reanalysis_Files/GODAS/pottmp.1980.nc' godas_sal = '/Volumes/WDC_internal/Users/bell/Data_Local/Reanalysis_Files/GODAS/salt.1980.nc' #GODAS PTEMP df = EcoFOCI_netCDF(godas_ptemp) global_atts = df.get_global_atts() vars_dic = df.get_vars() gd_ptmp = df.ncreadfile_dic() df.close() #GODAS SAL df = EcoFOCI_netCDF(godas_sal) global_atts = df.get_global_atts() vars_dic = df.get_vars() gd_sal = df.ncreadfile_dic() df.close() #ABS Sal f(sal, pres, lat, lon) #pressure needs to be determined from depth
help='Trim: start and end boundarys for trimming (inclusive)\ Format: yyyy-mm-ddThh:mm:ss start-date end-date') args = parser.parse_args() if args.featureType: featureType=args.featureType else: featureType='' if args.operation in ['CF','CF Convert','CF_Convert']: #generates near file if args.is2D: df = EcoFOCI_netCDF( args.sourcefile ) global_atts = df.get_global_atts() vars_dic = df.get_vars() ncdata = df.ncreadfile_dic() #Convert two word EPIC time to python datetime.datetime representation and then format for CF standards dt_from_epic = EPIC2Datetime(ncdata['time'], ncdata['time2']) if args.time_since_str: time_since_str = " ".join(args.time_since_str) CF_time = get_UDUNITS(dt_from_epic,time_since_str) else: time_since_str = 'days since 1900-01-01' CF_time = get_UDUNITS(dt_from_epic,time_since_str) try: History=global_atts['History']