def convert_file(self, file, title="MODIS SST Obs"): """ Load an MODIS file and convert into an obs structure """ # Load MODIS Data import re nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(nc.variables["sst"][:], self.temp_limits[0], self.temp_limits[1]) err = np.ones(dat.shape) * self.temp_error time = seapy.date2day(datetime.datetime.strptime( re.sub('\.[0-9]+Z$', '', nc.time_coverage_end), "%Y-%m-%dT%H:%M:%S"), self.epoch) # Check the data flags flags = np.ma.masked_not_equal(nc.variables["qual_sst"][:], 0) dat[flags.mask] = np.ma.masked nc.close() if self.grid.east(): lon[lon < 0] += 360 lon, lat = np.meshgrid(lon, lat) good = dat.nonzero() lat = lat[good] lon = lon[good] data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err[good], self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, title)
def from_roms(roms_file, bry_file, grid=None, records=None):
    """
    Given a ROMS history, average, or climatology file, generate
    boundary conditions on the same grid.

    Parameters
    ----------
    roms_file : string or list,
        ROMS source (history, average, climatology file)
    bry_file : string,
        output boundary file
    grid : seapy.model.grid or string, optional,
        ROMS grid for boundaries
    records : array, optional
        record indices to put into the boundary

    Returns
    -------
    None
    """
    if grid is None:
        grid = seapy.model.asgrid(roms_file)
    else:
        grid = seapy.model.asgrid(grid)
    ncroms = seapy.netcdf(roms_file)
    src_ref, time = seapy.roms.get_reftime(ncroms)
    records = np.arange(0, len(ncroms.variables[time][:])) \
        if records is None else records

    # Create the boundary file and fill up the descriptive data
    ncbry = seapy.roms.ncgen.create_bry(bry_file,
                                        eta_rho=grid.eta_rho,
                                        xi_rho=grid.xi_rho,
                                        s_rho=grid.n,
                                        reftime=src_ref,
                                        title="generated from " + roms_file)
    brytime = seapy.roms.get_timevar(ncbry)
    grid.to_netcdf(ncbry)
    ncbry.variables["bry_time"][:] = netCDF4.date2num(
        netCDF4.num2date(ncroms.variables[time][records],
                         ncroms.variables[time].units),
        ncbry.variables[brytime].units)

    for var in seapy.roms.fields:
        if var in ncroms.variables:
            for bry in sides:
                ndim = seapy.roms.fields[var]["dims"]
                if ndim == 3:
                    ncbry.variables["_".join((var, bry))][:] = \
                        ncroms.variables[var][records, :,
                                              sides[bry].indices[0],
                                              sides[bry].indices[1]]
                elif ndim == 2:
                    ncbry.variables["_".join((var, bry))][:] = \
                        ncroms.variables[var][records,
                                              sides[bry].indices[0],
                                              sides[bry].indices[1]]
    ncbry.close()
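# Usage sketch for from_roms above (hypothetical file names): generate
# boundary conditions for every record of an average file.
#
# >>> from_roms("ocean_avg.nc", "ocean_bry.nc", grid="ocean_grid.nc")
#
# Each ROMS field found in the source is written one variable per
# boundary side (e.g. temp_west) into the new boundary file.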
def convert_file(self, file, title="AVISO Obs"): """ Load an AVISO file and convert into an obs structure """ # Load AVISO Data nc = seapy.netcdf(file) lonname = 'lon' if 'lon' in nc.variables.keys() else 'longitude' lon = nc.variables[lonname][:] latname = 'lat' if 'lat' in nc.variables.keys() else 'latitude' lat = nc.variables[latname][:] dat = np.squeeze(nc.variables["sla"][:]) err = np.squeeze(nc.variables["err"][:]) time = seapy.roms.get_time( nc, "time", records=[0], epoch=self.epoch)[0] nc.close() lon, lat = np.meshgrid(lon, lat) lat = lat.flatten() lon = lon.flatten() if not self.grid.east(): lon[lon > 180] -= 360 data = [seapy.roms.obs.raw_data("ZETA", "SSH_AVISO_MAP", dat.flatten(), err.flatten(), self.ssh_error)] # Grid it obs = seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, title) # Apply the model mean ssh to the sla data if self.ssh_mean is not None: m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean, obs.x, obs.y, nx=1, ny=1, weight=7) obs.value += m return obs
def _initfile(self):
    """
    Using an input file, try to load as much information
    as can be found in the given file.

    Parameters
    ----------
    None

    Returns
    -------
    None : sets attributes in grid
    """
    # Define a dictionary to go through and convert netcdf variables
    # to internal class attributes
    gvars = {"lat_rho": ["lat_rho", "lat", "latitude", "y_rho"],
             "lon_rho": ["lon_rho", "lon", "longitude", "x_rho"],
             "lat_u": ["lat_u", "y_u"],
             "lon_u": ["lon_u", "x_u"],
             "lat_v": ["lat_v", "y_v"],
             "lon_v": ["lon_v", "x_v"],
             "mask_rho": ["mask_rho", "mask"],
             "mask_u": ["mask_u"],
             "mask_v": ["mask_v"],
             "angle": ["angle"],
             "h": ["h"],
             "n": ["N"],
             "theta_s": ["theta_s"],
             "theta_b": ["theta_b"],
             "tcline": ["Tcline"],
             "hc": ["hc"],
             "vtransform": ["Vtransform"],
             "vstretching": ["Vstretching"],
             "s_rho": ["s_rho"],
             "cs_r": ["Cs_r"],
             "f": ["f"],
             "pm": ["pm"],
             "pn": ["pn"],
             "z": ["z", "depth", "lev"]
             }

    # Open the file
    self._nc = seapy.netcdf(self.filename)
    try:
        self.name = re.search(r"[^\.]*",
                              os.path.basename(self.filename)).group()
    except Exception:
        self.name = "untitled"
    self.key = {}
    for var in gvars:
        for inp in gvars[var]:
            if inp in self._nc.variables:
                self.key[var] = inp
                self.__dict__[var] = self._nc.variables[inp][:]
                break
    # Close the file
    self._nc.close()
    self._nc = None
def convert_file(self, file, title="OSTIA SST Obs"): """ Load an OSTIA file and convert into an obs structure """ # Load OSTIA Data nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze( nc.variables["analysed_sst"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside(np.squeeze( nc.variables["analysis_error"][:]), 0.01, 2.0) dat[err.mask] = np.ma.masked time = seapy.roms.num2date( nc, "time", records=[0], epoch=self.epoch)[0] nc.close() if self.grid.east(): lon[lon < 0] += 360 lon, lat = np.meshgrid(lon, lat) good = dat.nonzero() lat = lat[good] lon = lon[good] data = [seapy.roms.obs.raw_data("TEMP", "SST_OSTIA", dat.compressed(), err[good], self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, title)
def convert_file(self, file, title="OSTIA SST Obs"): """ Load an OSTIA file and convert into an obs structure """ # Load OSTIA Data nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze( nc.variables["analysed_sst"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside(np.squeeze( nc.variables["analysis_error"][:]), 0.01, 2.0) dat[err.mask] = np.ma.masked time = netCDF4.num2date(nc.variables["time"][0], nc.variables["time"].units) - self.epoch time = time.total_seconds() * seapy.secs2day nc.close() if self.grid.east(): lon[lon < 0] += 360 lon, lat = np.meshgrid(lon, lat) good = dat.nonzero() lat = lat[good] lon = lon[good] data = [seapy.roms.obs.raw_data("TEMP", "SST_OSTIA", dat.compressed(), err[good], self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, title)
def convert_file(self, file, title="AVISO Obs"): """ Load an AVISO file and convert into an obs structure """ # Load AVISO Data nc = seapy.netcdf(file) lonname = 'lon' if 'lon' in nc.variables.keys() else 'longitude' lon = nc.variables[lonname][:] latname = 'lat' if 'lat' in nc.variables.keys() else 'latitude' lat = nc.variables[latname][:] dat = np.squeeze(nc.variables["sla"][:]) err = np.squeeze(nc.variables["err"][:]) time = netCDF4.num2date(nc.variables["time"][0], nc.variables["time"].units) - self.epoch time = time.total_seconds() * seapy.secs2day nc.close() lon, lat = np.meshgrid(lon, lat) lat = lat.flatten() lon = lon.flatten() if not self.grid.east(): lon[lon > 180] -= 360 data = [seapy.roms.obs.raw_data("ZETA", "SSH_AVISO_MAP", dat.flatten(), err.flatten(), self.ssh_error)] # Grid it obs = seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, title) # Apply the model mean ssh to the sla data if self.ssh_mean is not None: m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean, obs.x, obs.y, nx=1, ny=1, weight=7) obs.value += m return obs
def from_std(std_filename, bry_std_file, fields=None):
    """
    Generate the boundary standard deviations file for 4DVAR from the
    standard deviation of a boundary file. Best to use nco:

    $ ncwa -a bry_time roms_bry_file tmp.nc

    $ ncbo -O -y sub roms_bry_file tmp.nc tmp.nc

    $ ncra -y rmssdn tmp.nc roms_bry_std.nc

    to generate the standard deviations. This method simply takes the
    standard deviations of the boundaries and puts them into the
    proper format for ROMS 4DVAR.

    Parameters
    ----------
    std_filename : string or list,
        Filename of the boundary standard deviation file
    bry_std_file : string,
        Filename of the boundary standard deviations file to create
    fields : list, optional,
        ROMS fields to generate boundaries for. The default are the
        standard fields as defined in seapy.roms.fields

    Returns
    -------
    None
    """
    ncstd = seapy.netcdf(std_filename)
    eta_rho = len(ncstd.dimensions["eta_rho"])
    xi_rho = len(ncstd.dimensions["xi_rho"])
    s_rho = len(ncstd.dimensions["s_rho"])
    ncout = seapy.roms.ncgen.create_da_bry_std(
        bry_std_file, eta_rho=eta_rho, xi_rho=xi_rho, s_rho=s_rho,
        title='STD from ' + std_filename)
    ncout.variables["ocean_time"][:] = ncstd.variables["bry_time"][0]

    if fields is None:
        fields = seapy.roms.fields

    # Go over every side for every field and put it together
    for var in fields:
        vname = var + "_obc"
        if vname not in ncout.variables:
            ncout.createVariable(vname, np.float32,
                                 ('ocean_time', "boundary",
                                  "s_rho", "IorJ"))
        ndat = np.zeros(ncout.variables[vname].shape)
        for bry in sides:
            order = sides[bry].order - 1
            dat = ncstd.variables[var + "_" + bry][0, :]
            if dat.ndim == 1:
                ndat[0, order, :len(dat)] = dat
            else:
                ndat[0, order, :, :dat.shape[1]] = dat
        ncout.variables[vname][:] = ndat
        ncout.sync()
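# Usage sketch for from_std above (hypothetical file names): after the nco
# steps in the docstring produce roms_bry_std.nc, reformat it for 4D-Var.
#
# >>> from_std("roms_bry_std.nc", "roms_bry_std_4dvar.nc")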
def convert_file(self, file, title="TAO Obs"): """ Load a TAO netcdf file and convert into an obs structure """ vals = {"temp": ["T_20", "QT_5020"], "salt": ["S_41", "QS_5041"], "u": ["U_320", "QS_5300"], "v": ["V_321", "QS_5300"]} nc = seapy.netcdf(file) lat = nc.variables["lat"][:] lon = nc.variables["lon"][:] if not self.grid.east(): lon[lon > 180] -= 360 lat, lon = np.meshgrid(lat, lon) time = netCDF4.num2date(nc.variables["time"][:], nc.variables["time"].units) - self.epoch time = list(map(lambda x: x.total_seconds() * seapy.secs2day, time)) depth = -nc.variables["depth"][:] profile_list = np.where(np.logical_and.reduce(( lon >= np.min(self.grid.lon_rho), lon <= np.max(self.grid.lon_rho), lat >= np.min(self.grid.lat_rho), lat <= np.max(self.grid.lat_rho)))) # If nothing is in the area, return nothing if not profile_list[0].size: return None # Process each of the variables that are present obsdata = [] for field in vals: limit = getattr(self, field + '_limits') if vals[field][0] in nc.variables: data = nc.variables[vals[field][0]][:] data = np.ma.masked_outside( data[profile_list[0], profile_list[1], :, :], limit[0], limit[1], copy=False) qc = nc.variables[vals[field][1]][:] qc = qc[profile_list[0], profile_list[1], :, :] bad = np.where(np.logical_and(qc != 1, qc != 2)) data[bad] = np.ma.masked obsdata.append(seapy.roms.obs.raw_data(field, "TAO_ARRAY", data.compressed(), None, getattr(self, field + '_error'))) nc.close() # Build the time, lon, lat, and depth arrays of appropriate size npts = profile_list[0].size ndep = depth.size nt = len(time) lat = np.resize(lat[profile_list], (nt, ndep, npts)) lat = np.squeeze(np.transpose(lat, (2, 1, 0)))[~data.mask] lon = np.resize(lon[profile_list], (nt, ndep, npts)) lon = np.squeeze(np.transpose(lon, (2, 1, 0)))[~data.mask] depth = np.resize(depth, (npts, nt, ndep)) depth = np.squeeze(np.transpose(depth, (0, 2, 1)))[~data.mask] time = np.squeeze(np.resize(time, (npts, ndep, nt)))[~data.mask] return seapy.roms.obs.gridder(self.grid, time, lon, lat, depth, obsdata, self.dt, title)
def from_roms(roms_file, ini_file, record=0, time=None, grid=None,
              clobber=False, cdl=None):
    """
    Given a ROMS history, average, or climatology file, generate
    initial conditions on the same grid.

    Parameters
    ----------
    roms_file: string or list
        Input ROMS source (history, average, climatology file)
    ini_file: string
        Input name for output initial condition file
    record: int
        Input index to use as initial condition
    time: datetime, optional
        Input datetime to use for the initial condition (default to
        record time)
    grid: seapy.model.grid or string, optional
        Input ROMS grid: specify the grid if loaded or filename to load
    clobber: bool, optional
        If True, clobber any existing files and recreate. If False, use
        the existing file definition
    cdl: string, optional,
        Use the specified CDL file as the definition for the new
        netCDF file.

    Returns
    -------
    None
    """
    # Load the grid
    if grid is None:
        grid = seapy.model.asgrid(roms_file)
    else:
        grid = seapy.model.asgrid(grid)
    ncroms = seapy.netcdf(roms_file)
    src_ref, romstime = seapy.roms.get_reftime(ncroms)

    # Create the initial file and fill up the descriptive data
    ncini = seapy.roms.ncgen.create_ini(ini_file,
                                        eta_rho=grid.eta_rho,
                                        xi_rho=grid.xi_rho,
                                        s_rho=grid.n,
                                        reftime=src_ref,
                                        clobber=clobber,
                                        cdl=cdl,
                                        title="generated from " + roms_file)
    grid.to_netcdf(ncini)
    if time is None:
        time = seapy.roms.num2date(ncroms, romstime, record)
    ncini.variables["ocean_time"][:] = seapy.roms.date2num(
        time, ncini, "ocean_time")

    # Fill up the initial state with the roms file data
    for var in seapy.roms.fields:
        if var in ncini.variables and var in ncroms.variables:
            ncini.variables[var][0, :] = ncroms.variables[var][record, :]

    # Close up
    ncini.close()
    ncroms.close()
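# Usage sketch for from_roms above (hypothetical file names): take record
# 5 of an average file as the initial state and restamp it to a new date.
#
# >>> import datetime
# >>> from_roms("ocean_avg.nc", "ocean_ini.nc", record=5,
# ...           time=datetime.datetime(2014, 1, 1))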
def convert_file(self, file, title="TAO Obs"): """ Load a TAO netcdf file and convert into an obs structure """ vals = {"temp": ["T_20", "QT_5020"], "salt": ["S_41", "QS_5041"], "u": ["U_320", "QS_5300"], "v": ["V_321", "QS_5300"]} nc = seapy.netcdf(file) lat = nc.variables["lat"][:] lon = nc.variables["lon"][:] if not self.grid.east(): lon[lon > 180] -= 360 lat, lon = np.meshgrid(lat, lon) time = seapy.roms.num2date(nc, "time", epoch=self.epoch) depth = -nc.variables["depth"][:] profile_list = np.where(np.logical_and.reduce(( lon >= np.min(self.grid.lon_rho), lon <= np.max(self.grid.lon_rho), lat >= np.min(self.grid.lat_rho), lat <= np.max(self.grid.lat_rho)))) # If nothing is in the area, return nothing if not profile_list[0].size: return None # Process each of the variables that are present obsdata = [] for field in vals: limit = getattr(self, field + '_limits') if vals[field][0] in nc.variables: data = nc.variables[vals[field][0]][:] data = np.ma.masked_outside( data[profile_list[0], profile_list[1], :, :], limit[0], limit[1], copy=False) qc = nc.variables[vals[field][1]][:] qc = qc[profile_list[0], profile_list[1], :, :] bad = np.where(np.logical_and(qc != 1, qc != 2)) data[bad] = np.ma.masked obsdata.append(seapy.roms.obs.raw_data(field, "TAO_ARRAY", data.compressed(), None, getattr(self, field + '_error'))) nc.close() # Build the time, lon, lat, and depth arrays of appropriate size npts = profile_list[0].size ndep = depth.size nt = len(time) lat = np.resize(lat[profile_list], (nt, ndep, npts)) lat = np.squeeze(np.transpose(lat, (2, 1, 0)))[~data.mask] lon = np.resize(lon[profile_list], (nt, ndep, npts)) lon = np.squeeze(np.transpose(lon, (2, 1, 0)))[~data.mask] depth = np.resize(depth, (npts, nt, ndep)) depth = np.squeeze(np.transpose(depth, (0, 2, 1)))[~data.mask] time = np.squeeze(np.resize(time, (npts, ndep, nt)))[~data.mask] return seapy.roms.obs.gridder(self.grid, time, lon, lat, depth, obsdata, self.dt, title)
def gen_direct_forcing(his_file, frc_file):
    """
    Generate a direct forcing file from a history (or other ROMS output)
    file. It requires that sustr, svstr, shflux, and ssflux (or swflux)
    with salt be available. This will generate a forcing file that
    contains: sustr, svstr, swflux, and ssflux.

    Parameters
    ----------
    his_file: string,
        The ROMS history (or other) file(s) (can use wildcards) that
        contains the fields to make forcing from
    frc_file: string,
        The output forcing file

    Returns
    -------
    None: Generates an output file of bulk forcings
    """
    import os

    infile = seapy.netcdf(his_file)
    ref, _ = seapy.roms.get_reftime(infile)

    # Create the output file
    nc = seapy.roms.ncgen.create_frc_direct(
        frc_file,
        eta_rho=infile.dimensions['eta_rho'].size,
        xi_rho=infile.dimensions['xi_rho'].size,
        reftime=ref,
        clobber=True,
        title="Forcing from " + os.path.basename(his_file))

    # Copy the data over
    nc.variables['SSS'][:] = infile.variables['salt'][:, -1, :, :]
    if 'EminusP' in infile.variables:
        nc.variables['swflux'][:] = infile.variables['EminusP'][:]
    elif 'swflux' in infile.variables:
        nc.variables['swflux'][:] = infile.variables['swflux'][:]
    else:
        nc.variables['swflux'][:] = infile.variables['ssflux'][:] \
            / nc.variables['SSS'][:]
    nc.variables['frc_time'][:] = netCDF4.date2num(
        netCDF4.num2date(infile.variables['ocean_time'][:],
                         infile.variables['ocean_time'].units),
        nc.variables['frc_time'].units)

    for f in seapy.progressbar.progress(("sustr", "svstr", "shflux",
                                         "swrad", "lat_rho", "lat_u",
                                         "lat_v", "lon_rho", "lon_u",
                                         "lon_v")):
        if f in infile.variables:
            nc.variables[f][:] = seapy.convolve_mask(
                infile.variables[f][:], copy=False)
    nc.close()
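# Usage sketch for gen_direct_forcing above (hypothetical file names):
# wildcards are handed to seapy.netcdf, so a run's worth of history files
# can be aggregated in a single call.
#
# >>> gen_direct_forcing("ocean_his_*.nc", "ocean_frc_direct.nc")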
def tide_error(his_file, tide_file, grid=None):
    """
    Calculates the tidal error for each point given a model history
    and the tidal file used

    Parameters
    ----------
    his_file : string,
        String of history file location. Can be multiple files using
        wildcard
    tide_file: string,
        String of tidal file location
    grid : string or grid, optional,
        If specified, use this grid. Default is to build a grid from
        the history file.

    Returns
    -------
    tide_error : masked_array,
        Array containing the tidal error at each point, with land
        points masked
    """
    if grid:
        grid = seapy.model.asgrid(grid)
    else:
        grid = seapy.model.asgrid(his_file)

    # Load tidal file data
    frc = load_forcing(tide_file)

    # Calculate tidal error for each point
    nc = seapy.netcdf(his_file)
    times = seapy.roms.num2date(nc)
    tide_error = np.ma.masked_where(grid.mask_rho == 0,
                                    np.zeros(grid.mask_rho.shape))
    zeta = nc.variables['zeta'][:]
    nc.close()

    # 'track' is a progress iterator assumed to be imported at module
    # level (e.g., rich.progress.track)
    for i in track(range(grid.ln)):
        for j in range(grid.lm):
            if not tide_error.mask[i, j]:
                z = zeta[:, i, j]
                t_ap = seapy.tide.pack_amp_phase(
                    frc['tides'], frc['Eamp'][:, i, j],
                    frc['Ephase'][:, i, j])
                mout = seapy.tide.fit(times, z, tides=frc['tides'],
                                      lat=grid.lat_rho[i, j],
                                      tide_start=frc['tide_start'])
                for c in t_ap:
                    m = mout['major'][c]
                    t = t_ap[c]
                    tide_error[i, j] += 0.5 * (m.amp**2 + t.amp**2) - \
                        m.amp * t.amp * np.cos(m.phase - t.phase)
                tide_error[i, j] = np.sqrt(tide_error[i, j])

    return tide_error
def datespan_file(self, file):
    nc = seapy.netcdf(file)
    try:
        st = datetime.datetime.strptime(
            nc.getncattr("time_coverage_start"), "%Y-%m-%dT%H:%M:%SZ")
        en = datetime.datetime.strptime(
            nc.getncattr("time_coverage_end"), "%Y-%m-%dT%H:%M:%SZ")
    except (AttributeError, ValueError):
        st = en = None
    finally:
        nc.close()
    return st, en
def datespan_file(self, file):
    nc = seapy.netcdf(file)
    try:
        st = datetime.datetime.strptime(
            nc.getncattr("start_date"), "%Y-%m-%d UTC")
        en = datetime.datetime.strptime(
            nc.getncattr("stop_date"), "%Y-%m-%d UTC")
    except (AttributeError, ValueError):
        st = en = None
    finally:
        nc.close()
    return st, en
def tide_error(his_file, tide_file, grid=None):
    """
    Calculates the tidal error for each point given a model history
    and the tidal file used

    Parameters
    ----------
    his_file : string,
        String of history file location. Can be multiple files using
        wildcard
    tide_file: string,
        String of tidal file location
    grid : string or grid, optional,
        If specified, use this grid. Default is to build a grid from
        the history file.

    Returns
    -------
    tide_error : masked_array,
        Array containing the tidal error at each point, with land
        points masked
    """
    if grid:
        grid = seapy.model.asgrid(grid)
    else:
        grid = seapy.model.asgrid(his_file)

    # Load tidal file data
    frc = load_forcing(tide_file)

    # Calculate tidal error for each point
    nc = seapy.netcdf(his_file)
    times = seapy.roms.num2date(nc)
    tide_error = np.ma.masked_where(grid.mask_rho == 0,
                                    np.zeros(grid.mask_rho.shape))
    zeta = nc.variables['zeta'][:]
    nc.close()

    for i in seapy.progressbar.progress(range(grid.ln)):
        for j in range(grid.lm):
            if not tide_error.mask[i, j]:
                z = zeta[:, i, j]
                t_ap = seapy.tide.pack_amp_phase(
                    frc['tides'], frc['Eamp'][:, i, j],
                    frc['Ephase'][:, i, j])
                mout = seapy.tide.fit(times, z, tides=frc['tides'],
                                      lat=grid.lat_rho[i, j],
                                      tide_start=frc['tide_start'])
                for c in t_ap:
                    m = mout['major'][c]
                    t = t_ap[c]
                    tide_error[i, j] += 0.5 * (m.amp**2 + t.amp**2) - \
                        m.amp * t.amp * np.cos(m.phase - t.phase)
                tide_error[i, j] = np.sqrt(tide_error[i, j])

    return tide_error
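# Usage sketch for tide_error above (hypothetical file names): map the RMS
# misfit between the modeled tide and the tidal forcing it was given.
#
# >>> terr = tide_error("ocean_his_*.nc", "tide_frc.nc",
# ...                   grid="ocean_grid.nc")
# >>> print(terr.max())  # worst-case RMS tidal error, in units of zeta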
def load_forcing(filename):
    """
    Load a tidal forcing file into a dictionary

    Parameters
    ----------
    filename: string
        File name of the tidal forcing file to load

    Returns
    -------
    dict:
        Dictionary of the tidal forcing information with keys:
            Eamp : SSH amplitude
            Ephase : SSH phase (radians)
            Cmajor : velocity major ellipse
            Cminor : velocity minor ellipse
            Cphase : velocity ellipse phase (radians)
            Cangle : velocity ellipse angle (radians)
            tide_start : datetime of the tide reference
            tides : list of the tides
    """
    import re

    nc = seapy.netcdf(filename)
    frc = {}
    frc['Eamp'] = nc.variables['tide_Eamp'][:]
    frc['Ephase'] = np.radians(nc.variables['tide_Ephase'][:])
    frc['Cmajor'] = nc.variables['tide_Cmax'][:]
    frc['Cminor'] = nc.variables['tide_Cmin'][:]
    frc['Cphase'] = np.radians(nc.variables['tide_Cphase'][:])
    frc['Cangle'] = np.radians(nc.variables['tide_Cangle'][:])
    start_str = getattr(nc, 'tide_start', None) or \
        getattr(nc, 'base_date', None)
    tides = getattr(nc, 'tidal_constituents', None) or \
        getattr(nc, 'tides', None)
    frc['tides'] = tides.upper().split(", ")
    frc['tide_start'] = None
    nc.close()
    if start_str:
        try:
            frc['tide_start'] = datetime.datetime.strptime(
                re.sub(r'^.*since\s*', '', start_str),
                "%Y-%m-%d %H:%M:%S")
        except ValueError:
            pass
    return frc
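# Usage sketch for load_forcing above (hypothetical file name): inspect
# the constituents and SSH amplitude stored in a tidal forcing file.
#
# >>> frc = load_forcing("tide_frc.nc")
# >>> print(frc['tides'], frc['Eamp'].shape, frc['tide_start'])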
def datespan_file(self, file):
    """
    Return just the day that this argo file covers
    """
    nc = seapy.netcdf(file)
    try:
        d = netCDF4.num2date(nc.variables['JULD'][0],
                             nc.variables['JULD'].units)
        st = datetime.datetime(*d.timetuple()[:3])
        en = datetime.datetime(*d.timetuple()[:3] + (23, 59, 59))
    except Exception:
        st = en = None
    finally:
        nc.close()
    return st, en
def convert_file(self, file, title="REMSS SST Obs"): """ Load an REMSS file and convert into an obs structure """ # Load REMSS Data nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze( nc.variables["sea_surface_temperature"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside(np.squeeze( nc.variables["SSES_standard_deviation_error"][:]), 0.01, 2.0) dat[err.mask] = np.ma.masked # Check the data flags flags = np.ma.masked_not_equal( np.squeeze(nc.variables["rejection_flag"][:]), 0) dat[flags.mask] = np.ma.masked err[flags.mask] = np.ma.masked # Grab the observation time time = netCDF4.num2date(nc.variables["time"][:], nc.variables["time"].units) time = np.array([(t - self.epoch).total_seconds() * seapy.secs2day for t in time]) sst_time = nc.variables["sst_dtime"][:] * seapy.secs2day for n, i in enumerate(time): sst_time[n, :, :] += i sst_time[dat.mask] = np.ma.masked # Set up the coordinate lon, lat = np.meshgrid(lon, lat) lon = np.ma.masked_where(dat.mask, seapy.adddim(lon, len(time))) lat = np.ma.masked_where(dat.mask, seapy.adddim(lat, len(time))) nc.close() if self.grid.east(): lon[lon < 0] += 360 data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err.compressed(), self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, sst_time.compressed(), lon.compressed(), lat.compressed, None, data, self.dt, title)
def convert_file(self, file, title="AVISO SLA Track Obs"): """ Load an AVISO file and convert into an obs structure """ # Load AVISO Data nc = seapy.netcdf(file) lon = nc.variables["longitude"][:] lat = nc.variables["latitude"][:] slaname = 'SLA' if 'SLA' in nc.variables.keys() else 'sla_filtered' dat = nc.variables[slaname][:] time = seapy.roms.num2date(nc, "time", epoch=self.epoch) nc.close() # make them into vectors lat = lat.ravel() lon = lon.ravel() dat = dat.ravel() err = np.ones(dat.shape) * _aviso_sla_errors.get(self.provenance, 0.1) if not self.grid.east(): lon[lon > 180] -= 360 good = dat.nonzero() data = [seapy.roms.obs.raw_data("ZETA", self.provenance, dat[good], err[good], err[0])] # Grid it obs = seapy.roms.obs.gridder(self.grid, time, lon[good], lat[good], None, data, self.dt, title) # Apply the model mean ssh to the sla data if self.ssh_mean is not None and obs is not None: m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean, obs.x, obs.y, nx=1, ny=1, weight=7) obs.value += m # Duplicate the observations before and after as per the repeat # time unless it is zero if self.repeat and obs: prior = obs.copy() after = obs.copy() prior.time -= self.repeat / 24 after.time += self.repeat / 24 obs.add(prior) obs.add(after) return obs
def convert_file(self, file, title="AVISO SLA Track Obs"): """ Load an AVISO file and convert into an obs structure """ # Load AVISO Data nc = seapy.netcdf(file) lon = nc.variables["longitude"][:] lat = nc.variables["latitude"][:] slaname = 'SLA' if 'SLA' in nc.variables.keys() else 'sla_filtered' dat = nc.variables[slaname][:] time = seapy.roms.get_time(nc, "time", epoch=self.epoch) nc.close() # make them into vectors lat = lat.ravel() lon = lon.ravel() dat = dat.ravel() err = np.ones(dat.shape) * _aviso_sla_errors.get(self.provenance, 0.1) if not self.grid.east(): lon[lon > 180] -= 360 good = dat.nonzero() data = [seapy.roms.obs.raw_data("ZETA", self.provenance, dat[good], err[good], err[0])] # Grid it obs = seapy.roms.obs.gridder(self.grid, time, lon[good], lat[good], None, data, self.dt, title) # Apply the model mean ssh to the sla data if self.ssh_mean is not None and obs is not None: m, p = seapy.oasurf(self.grid.I, self.grid.J, self.ssh_mean, obs.x, obs.y, nx=1, ny=1, weight=7) obs.value += m # Duplicate the observations before and after as per the repeat # time unless it is zero if self.repeat and obs: prior = obs.copy() after = obs.copy() prior.time -= self.repeat / 24 after.time += self.repeat / 24 obs.add(prior) obs.add(after) return obs
def set_mask_h(self, fld=None):
    """
    Compute the mask and h array from a z-level model

    Parameters
    ----------
    fld : np.array
        3D array of values (such as temperature) to analyze to
        determine where the bottom and land lie

    Returns
    -------
    None : sets mask and h attributes in grid
    """
    if hasattr(self, "mask_rho") or self.cgrid:
        return
    if fld is None and self.filename is not None:
        if self._nc is None:
            self._nc = seapy.netcdf(self.filename)

        # Try to load a field from the file
        for f in ["temp", "temperature", "water_temp"]:
            if f in self._nc.variables:
                fld = self._nc.variables[f][0, :, :, :]
                fld = np.ma.array(fld, mask=np.isnan(fld))
                break

        # Close the file
        self._nc.close()

    # If we don't have a field to examine, then we cannot compute the
    # mask and bathymetry
    if fld is None:
        warn("Missing 3D field to evaluate.")
        return

    # Next, we go over the field to examine the depths and mask
    self.h = np.zeros(self.lat_rho.shape)
    self.mask_rho = np.zeros(self.lat_rho.shape)
    for k in range(self.z.size):
        water = np.nonzero(np.logical_not(fld.mask[k, :, :]))
        self.h[water] = self.z[k]
        if k == 0:
            self.mask_rho[water] = 1.0
    self.mask_u = self.mask_v = self.mask_rho
def set_mask_h(self, fld=None):
    """
    Compute the mask and h array from a z-level model

    Parameters
    ----------
    fld : np.array
        3D array of values (such as temperature) to analyze to
        determine where the bottom and land lie

    Returns
    -------
    None : sets mask and h attributes in grid
    """
    if hasattr(self, "mask_rho") or self.cgrid:
        return
    if fld is None and self.filename is not None:
        if self._nc is None:
            self._nc = seapy.netcdf(self.filename)

        # Try to load a field from the file
        for f in ["temp", "temperature", "water_temp", "fed"]:
            if f in self._nc.variables:
                fld = self._nc.variables[f][0, :, :, :]
                fld = np.ma.array(fld, mask=np.isnan(fld))
                break

        # Close the file
        self._nc.close()

    # If we don't have a field to examine, then we cannot compute the
    # mask and bathymetry
    if fld is None:
        warn("Missing 3D field to evaluate.")
        return

    # Next, we go over the field to examine the depths and mask
    self.h = np.zeros(self.lat_rho.shape)
    self.mask_rho = np.zeros(self.lat_rho.shape)
    for k in range(self.z.size):
        water = np.nonzero(np.logical_not(fld.mask[k, :, :]))
        self.h[water] = self.z[k]
        if k == 0:
            self.mask_rho[water] = 1.0
    self.mask_u = self.mask_v = self.mask_rho
def convert_file(self, file, title="VIIRS SST Obs"): """ Load a VIIRS file and convert into an obs structure """ # Load VIIRS Data nc = seapy.netcdf(file, aggdim="time") lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside( nc.variables["sea_surface_temperature"][:] - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside( nc.variables["sses_standard_deviation"][:], 0.01, 2.0) dat[err.mask] = np.ma.masked # Check the data flags if self.check_qc_flags: flags = np.ma.masked_not_equal( nc.variables["quality_level"][:], 5) dat[flags.mask] = np.ma.masked else: dat = np.ma.masked_where( nc.variables["quality_level"][:].data == 1, dat) # Grab the observation time time = netCDF4.num2date(nc.variables["time"][:], nc.variables["time"].units) - self.epoch time = np.asarray([x.total_seconds() for x in time])[ :, np.newaxis, np.newaxis] dtime = nc.variables["sst_dtime"][:] time = (time + dtime) * seapy.secs2day nc.close() # Set up the coordinate lon = np.ma.masked_where(dat.mask, seapy.adddim(lon, len(time))) lat = np.ma.masked_where(dat.mask, seapy.adddim(lat, len(time))) if self.grid.east(): lon[lon < 0] += 360 good = dat.nonzero() data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err[good], self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, time[good], lon[good], lat[good], None, data, self.dt, title)
def convert_file(self, file, title="REMSS SST Obs"): """ Load an REMSS file and convert into an obs structure """ # Load REMSS Data nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze( nc.variables["sea_surface_temperature"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside(np.squeeze( nc.variables["SSES_standard_deviation_error"][:]), 0.01, 2.0) dat[err.mask] = np.ma.masked # Check the data flags flags = np.ma.masked_not_equal( np.squeeze(nc.variables["rejection_flag"][:]), 0) dat[flags.mask] = np.ma.masked err[flags.mask] = np.ma.masked # Grab the observation time time = seapy.roms.num2date(nc, "time", epoch=self.epoch) sst_time = nc.variables["sst_dtime"][:] * seapy.secs2day for n, i in enumerate(time): sst_time[n, :, :] += i sst_time[dat.mask] = np.ma.masked # Set up the coordinate lon, lat = np.meshgrid(lon, lat) lon = np.ma.masked_where(dat.mask, seapy.adddim(lon, len(time))) lat = np.ma.masked_where(dat.mask, seapy.adddim(lat, len(time))) nc.close() if self.grid.east(): lon[lon < 0] += 360 data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err.compressed(), self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, sst_time.compressed(), lon.compressed(), lat.compressed, None, data, self.dt, title)
def convert_file(self, file, title="NAVO SST Obs"): """ Load a NAVO map file and convert into an obs structure """ import re import sys nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze(nc.variables["analysed_sst"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.array(np.squeeze( nc.variables["analysis_error"][:]), mask=dat.mask) # this is an analyzed product and provides errors as a function # of space and time directly the temperature is the bulk # temperature (ie at around 4m depth, below the e-folding depths of # sunlight in the ocean so the product does not have a diuranl cycle # (ie you don;t have to worry about hourly variations) time = netCDF4.num2date(nc.variables["time"][0], nc.variables["time"].units) - self.epoch time = time.total_seconds() * seapy.secs2day nc.close() # here we set the depth to be 4 m below the surface if self.grid.east(): lon[lon < 0] += 360 lon, lat = np.meshgrid(lon, lat) good = dat.nonzero() lat = lat[good] lon = lon[good] data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err[good], self.temp_error)] # Grid it obs = seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, depth_adjust=True, title=title) obs.z *= 0 obs.depth = -self.depth * np.ones(len(obs.depth)) return obs
def convert_file(self, file, title="REMSS SST Obs"): """ Load an REMSS file and convert into an obs structure """ # Load REMSS Data nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze( nc.variables["sea_surface_temperature"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside(np.squeeze( nc.variables["sses_standard_deviation"][:]), 0.01, 2.0) dat[err.mask] = np.ma.masked # Check the data flags if self.check_qc_flags: flags = np.ma.masked_not_equal( np.squeeze(nc.variables["quality_level"][:]), 5) dat[flags.mask] = np.ma.masked else: dat = np.ma.masked_where( np.squeeze(nc.variables["quality_level"][:]).data == 1, dat) # Grab the observation time time = netCDF4.num2date(nc.variables["time"][0], nc.variables["time"].units) - self.epoch dtime = nc.variables["sst_dtime"][:] time = np.squeeze((time.total_seconds() + dtime) * seapy.secs2day) nc.close() if self.grid.east(): lon[lon < 0] += 360 good = dat.nonzero() data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err[good], self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, time[good], lon[good], lat[good], None, data, self.dt, title)
def convert_file(self, file, title="NAVO SST Obs"): """ Load a NAVO map file and convert into an obs structure """ import re import sys nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze(nc.variables["analysed_sst"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.array(np.squeeze( nc.variables["analysis_error"][:]), mask=dat.mask) # this is an analyzed product and provides errors as a function # of space and time directly the temperature is the bulk # temperature (ie at around 4m depth, below the e-folding depths of # sunlight in the ocean so the product does not have a diuranl cycle # (ie you don;t have to worry about hourly variations) time = seapy.roms.num2date( nc, "time", records=[0], epoch=self.epoch)[0] nc.close() # here we set the depth to be 4 m below the surface if self.grid.east(): lon[lon < 0] += 360 lon, lat = np.meshgrid(lon, lat) good = dat.nonzero() lat = lat[good] lon = lon[good] data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err[good], self.temp_error)] # Grid it obs = seapy.roms.obs.gridder(self.grid, time, lon, lat, None, data, self.dt, depth_adjust=True, title=title) obs.z *= 0 obs.depth = -self.depth * np.ones(len(obs.depth)) return obs
def convert_file(self, file, title="REMSS SST Obs"): """ Load an REMSS file and convert into an obs structure """ # Load REMSS Data nc = seapy.netcdf(file) lon = nc.variables["lon"][:] lat = nc.variables["lat"][:] dat = np.ma.masked_outside(np.squeeze( nc.variables["sea_surface_temperature"][:]) - 273.15, self.temp_limits[0], self.temp_limits[1]) err = np.ma.masked_outside(np.squeeze( nc.variables["sses_standard_deviation"][:]), 0.01, 2.0) dat[err.mask] = np.ma.masked # Check the data flags if self.check_qc_flags: flags = np.ma.masked_not_equal( np.squeeze(nc.variables["quality_level"][:]), 5) dat[flags.mask] = np.ma.masked else: dat = np.ma.masked_where( np.squeeze(nc.variables["quality_level"][:]).data == 1, dat) # Grab the observation time time = seapy.roms.num2date(nc, "time", records=[0])[0] - self.epoch dtime = nc.variables["sst_dtime"][:] time = np.squeeze((time.total_seconds() + dtime) * seapy.secs2day) nc.close() if self.grid.east(): lon[lon < 0] += 360 good = dat.nonzero() data = [seapy.roms.obs.raw_data("TEMP", self.provenance, dat.compressed(), err[good], self.temp_error)] # Grid it return seapy.roms.obs.gridder(self.grid, time[good], lon[good], lat[good], None, data, self.dt, title)
def merge_files(obs_files, out_files, days, dt, reftime, limits=None,
                clobber=True):
    """
    merge together a group of observation files into combined new files
    with observations that lie only within the corresponding dates

    Parameters
    ----------
    obs_files : list,
        List of files to merge together (a single file will work, it
        will just be filtered by the dates)
    out_files : list or string,
        list of the filenames to create for each of the output periods.
        If a single string is given, the character '#' will be replaced
        by the starting time of the observation (e.g. out_files="out_#.nc"
        will become out_03234.nc)
    days : list of tuples,
        List of starting and ending day numbers for each cycle to
        process. The first value is the start day, the second is the end
        day. The number of tuples is the number of files to output.
    dt : float,
        Time separation of observations. Observations that are less
        than dt apart in time will be set to the same time.
    reftime :
        Reference time used to process the observations. The merged
        files are now timed in relation to the beginning of the
        assimilation cycle
    limits : dict, optional
        Set the limits of the grid points that observations are allowed
        within, {'north':i, 'south':i, 'east':i, 'west':i }. As obs near
        the boundaries are not advisable, this allows you to specify the
        valid grid range to accept obs within.
    clobber: bool, optional
        If True, output files are overwritten. If False, they are
        skipped.

    Returns
    -------
    None

    Examples
    --------
    Put together three files into 5 separate files in two day intervals
    from day 10 through day 20:

    >>> merge_files(["obs_1.nc", "obs_2.nc", "obs_3.nc"], "new_#.nc",
    ...             [(i, i+2) for i in range(10, 20, 2)])

    Put together the same three files into 3 overlapping separate files
    in five day intervals with one overlapping day:

    >>> merge_files(["obs_1.nc", "obs_2.nc", "obs_3.nc"], "new_#.nc",
    ...             [(i, i+5) for i in range(10, 20, 4)])

    """
    import re
    import os

    # Only unique files
    obs_files = set().union(seapy.flatten(obs_files))
    outtime = False
    if isinstance(out_files, str):
        outtime = True
        time = re.compile(r'\#')

    # Go through the files to determine which periods they cover
    myobs = list()
    sdays = list()
    edays = list()
    for file in obs_files:
        nc = seapy.netcdf(file)
        fdays = nc.variables['survey_time'][:]
        nc.close()
        l = np.where(np.logical_and(fdays >= np.min(days),
                                    fdays <= np.max(days)))[0]
        if not l.size:
            continue
        myobs.append(file)
        sdays.append(fdays[0])
        edays.append(fdays[-1])
    sdays = np.asarray(sdays)
    edays = np.asarray(edays)

    # Loop over the dates in pairs
    for n, t in enumerate(seapy.progressbar.progress(days)):
        # Set output file name
        if outtime:
            outfile = time.sub("{:05d}".format(t[0]), out_files)
        else:
            outfile = out_files[n]

        if os.path.exists(outfile) and not clobber:
            continue

        # Find the files that cover the current period
        fidx = np.where(np.logical_and(sdays <= t[1], edays >= t[0]))[0]
        if not fidx.size:
            continue

        # Create new observations for this time period
        nobs = obs(myobs[fidx[0]])
        l = np.where(np.logical_or(nobs.time < t[0], nobs.time > t[1]))
        nobs.delete(l)
        for idx in fidx[1:]:
            o = obs(myobs[idx])
            l = np.where(np.logical_and(o.time >= t[0], o.time <= t[1]))
            nobs.add(o[l])

        # Remove any limits
        if limits is not None:
            l = np.where(np.logical_or.reduce((
                nobs.x < limits['west'],
                nobs.x > limits['east'],
                nobs.y < limits['south'],
                nobs.y > limits['north'])))
            nobs.delete(l)

        # Make time relative to the assimilation window
        nobs.reftime = reftime
        # nobs.reftime = seapy.day2date(t[0], epoch=reftime)
        # nobs.time = abs(abs(nobs.time) - abs(t[0]))

        # Save out the new observations
        nobs.to_netcdf(outfile, dt=dt)
def __init__(self, filename=None, time=None, x=None, y=None, z=None,
             lat=None, lon=None, depth=None, value=None, error=None,
             type=None, provenance=None, meta=None,
             title="ROMS Observations"):
    """
    Class to deal with ROMS observations for data assimilation

    Parameters
    ----------
    filename : string or list, optional,
        if filename is given, the data are loaded from a netcdf file
    time : ndarray, optional,
        time of observation in days
    x : ndarray, optional,
        obs location on grid in x (eta)
    y : ndarray, optional,
        obs location on grid in y (xi)
    z : ndarray, optional,
        obs location on grid in z (positive layers or negative depth [m])
    lat : ndarray, optional,
        obs true latitude [deg]
    lon : ndarray, optional,
        obs true longitude [deg]
    depth : ndarray, optional,
        obs true depth [m]
    value : ndarray, optional,
        obs value [units]
    error : ndarray, optional,
        obs error [units**2]
    type : ndarray, optional,
        obs type [1-zeta, 2-ubar, 3-vbar, 4-u, 5-v, 6-temp, 7-salt]
    provenance : ndarray, optional,
        obs provenance
    meta : ndarray, optional,
        obs additional information
    """
    self.title = title
    if filename is not None:
        nc = seapy.netcdf(filename)
        # Construct an array from the data in the file. If obs_meta
        # exists in the file, then load it; otherwise, fill with zeros
        self.filename = filename
        self.time = nc.variables["obs_time"][:]
        self.x = nc.variables["obs_Xgrid"][:]
        self.y = nc.variables["obs_Ygrid"][:]
        self.z = nc.variables["obs_Zgrid"][:]
        self.lat = nc.variables["obs_lat"][:]
        self.lon = nc.variables["obs_lon"][:]
        self.depth = nc.variables["obs_depth"][:]
        self.value = nc.variables["obs_value"][:]
        self.error = nc.variables["obs_error"][:]
        self.type = nc.variables["obs_type"][:]
        self.provenance = nc.variables["obs_provenance"][:]
        # Update the provenance definitions
        try:
            obs_provenance.update(dict(
                (int(k.strip()), v.strip())
                for v, k in (it.split(':')
                             for it in nc.obs_provenance.split(','))))
        except (AttributeError, ValueError):
            pass
        try:
            self.meta = nc.variables["obs_meta"][:]
        except KeyError:
            self.meta = np.zeros(self.value.size)
        finally:
            nc.close()
    else:
        self.filename = None
        if time is not None:
            self.time = np.atleast_1d(time)
        if x is not None:
            self.x = np.atleast_1d(x)
        if y is not None:
            self.y = np.atleast_1d(y)
        if z is not None:
            self.z = np.atleast_1d(z)
        if lat is not None:
            self.lat = np.atleast_1d(lat)
        if lon is not None:
            self.lon = np.atleast_1d(lon)
        if depth is not None:
            self.depth = np.atleast_1d(depth)
        if value is not None:
            self.value = np.atleast_1d(value)
        if error is not None:
            self.error = np.atleast_1d(error)
        if type is not None:
            self.type = astype(type)
        if provenance is not None:
            self.provenance = asprovenance(provenance)
        else:
            self.provenance = 0
        if meta is not None:
            self.meta = np.atleast_1d(meta)
    self._consistent()
def gen_bulk_forcing(infile, fields, outfile, grid, start_time, end_time,
                     epoch=seapy.default_epoch, clobber=False):
    """
    Given a source file (or URL) and a dictionary that maps the source
    fields to the ROMS fields, generate a new bulk forcing file for ROMS.

    Parameters
    ----------
    infile: string,
        The source file (or URL) to load the data from
    fields: dict,
        A dictionary of the fields to load and process. The dictionary
        is composed of:
            "frc_lat": STRING name of latitude field in forcing file
            "frc_lon": STRING name of longitude field in forcing file
            "frc_time": STRING name of time field in forcing file
            "frc_time_units": STRING optional, supply units of frc time
                field in forcing file
            keys of ROMS bulk forcing field names (Tair, Pair, Qair,
            rain, Uwind, Vwind, lwrad_down, swrad), each with a named
            tuple (forcing_data) with the following fields:
                field: STRING value of the forcing field to use
                ratio: FLOAT value to multiply with the source data
                offset: FLOAT value to add to the source data
    outfile: string,
        Name of the output file to create
    grid: seapy.model.grid or string,
        Grid to use for selecting spatial region
    start_time: datetime,
        Starting time of data to process
    end_time: datetime,
        Ending time of data to process
    epoch: datetime,
        Epoch to use for ROMS times
    clobber: bool optional,
        Delete existing file or not, default False

    Returns
    -------
    None: Generates an output file of bulk forcings

    Examples
    --------
    To generate GFS forcing for the grid "mygrid.nc" for the year 2014,
    use the standard GFS map definitions (and even the built-in GFS
    archive url):

    >>> seapy.roms.forcing.gen_bulk_forcing(seapy.roms.forcing.gfs_url,
    ...     seapy.roms.forcing.gfs_map, 'my_forcing.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1), datetime.datetime(2014,1,1))

    NCEP reanalysis is trickier as the files are broken up by variable
    type; hence, a separate file will be created for each variable. We
    can use the wildcards though to put together multiple time periods
    (e.g., 2014 through 2015).

    >>> seapy.roms.forcing.gen_bulk_forcing("uwnd.10m.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=True)
    >>> seapy.roms.forcing.gen_bulk_forcing("vwnd.10m.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=False)
    >>> seapy.roms.forcing.gen_bulk_forcing("air.2m.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=False)
    >>> seapy.roms.forcing.gen_bulk_forcing("dlwrf.sfc.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=False)
    >>> seapy.roms.forcing.gen_bulk_forcing("dswrf.sfc.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=False)
    >>> seapy.roms.forcing.gen_bulk_forcing("prate.sfc.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc.nc', 'mygrid.nc',
    ...     datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=False)
    >>> seapy.roms.forcing.gen_bulk_forcing("rhum.sig995.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc_rhum_slp.nc',
    ...     'mygrid.nc', datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=True)
    >>> seapy.roms.forcing.gen_bulk_forcing("slp.*nc",
    ...     seapy.roms.forcing.ncep_map, 'ncep_frc_rhum_slp.nc',
    ...     'mygrid.nc', datetime.datetime(2014,1,1),
    ...     datetime.datetime(2015,12,31), clobber=False)

    Two forcing files, 'ncep_frc.nc' and 'ncep_frc_rhum_slp.nc', are
    generated for use with ROMS. NOTE: You will have to use 'ncks' to
    eliminate the empty forcing fields between the two files to prevent
    ROMS from loading them.
    """
    # Load the grid
    grid = seapy.model.asgrid(grid)

    # Open the Forcing data
    forcing = seapy.netcdf(infile)

    # Gather the information about the forcing
    if 'frc_time_units' in fields:
        frc_time = netCDF4.num2date(
            forcing.variables[fields['frc_time']][:],
            fields['frc_time_units'])
    else:
        frc_time = netCDF4.num2date(
            forcing.variables[fields['frc_time']][:],
            forcing.variables[fields['frc_time']].units)

    # Figure out the time records that are required
    time_list = np.where(np.logical_and(frc_time >= start_time,
                                        frc_time <= end_time))[0]
    if not np.any(time_list):
        raise Exception("Cannot find valid times")

    # Get the latitude and longitude ranges
    minlat = np.floor(np.min(grid.lat_rho)) - 0.5
    maxlat = np.ceil(np.max(grid.lat_rho)) + 0.5
    minlon = np.floor(np.min(grid.lon_rho)) - 0.5
    maxlon = np.ceil(np.max(grid.lon_rho)) + 0.5
    frc_lon = forcing.variables[fields['frc_lon']][:]
    frc_lat = forcing.variables[fields['frc_lat']][:]

    # Make the forcing lat/lon on 2D grid
    if frc_lon.ndim == 3:
        frc_lon = np.squeeze(frc_lon[0, :, :])
        frc_lat = np.squeeze(frc_lat[0, :, :])
    elif frc_lon.ndim == 1:
        frc_lon, frc_lat = np.meshgrid(frc_lon, frc_lat)

    # Find the values in our region
    if not grid.east():
        frc_lon[frc_lon > 180] -= 360
    region_list = np.where(np.logical_and.reduce((
        frc_lon <= maxlon,
        frc_lon >= minlon,
        frc_lat <= maxlat,
        frc_lat >= minlat)))
    if not np.any(region_list):
        raise Exception("Cannot find valid region")
    eta_list = np.s_[np.min(region_list[0]):np.max(region_list[0]) + 1]
    xi_list = np.s_[np.min(region_list[1]):np.max(region_list[1]) + 1]
    frc_lon = frc_lon[eta_list, xi_list]
    frc_lat = frc_lat[eta_list, xi_list]

    # Create the output file
    out = seapy.roms.ncgen.create_frc_bulk(outfile, lat=frc_lat.shape[0],
                                           lon=frc_lon.shape[1],
                                           reftime=epoch, clobber=clobber)
    out.variables['frc_time'][:] = netCDF4.date2num(
        frc_time[time_list], out.variables['frc_time'].units)
    out.variables['lat'][:] = frc_lat
    out.variables['lon'][:] = frc_lon

    # Loop over the fields and fill out the output file
    for f in seapy.progressbar.progress(list(set(fields.keys()) &
                                             (out.variables.keys()))):
        if hasattr(fields[f], 'field'):
            out.variables[f][:] = \
                forcing.variables[fields[f].field][time_list,
                                                   eta_list, xi_list] * \
                fields[f].ratio + fields[f].offset
            out.sync()
    out.close()
def __init__(self, filename=None, time=None, x=None, y=None, z=None,
             lat=None, lon=None, depth=None, value=None, error=None,
             type=None, provenance=None, meta=None,
             title="ROMS Observations"):
    """
    Class to deal with ROMS observations for data assimilation

    Parameters
    ----------
    filename : string or list, optional,
        if filename is given, the data are loaded from a netcdf file
    time : ndarray, optional,
        time of observation in days
    x : ndarray, optional,
        obs location on grid in x (eta)
    y : ndarray, optional,
        obs location on grid in y (xi)
    z : ndarray, optional,
        obs location on grid in z (positive layers or negative depth [m])
    lat : ndarray, optional,
        obs true latitude [deg]
    lon : ndarray, optional,
        obs true longitude [deg]
    depth : ndarray, optional,
        obs true depth [m]
    value : ndarray, optional,
        obs value [units]
    error : ndarray, optional,
        obs error [units**2]
    type : ndarray, optional,
        obs type [1-zeta, 2-ubar, 3-vbar, 4-u, 5-v, 6-temp, 7-salt]
    provenance : ndarray, optional,
        obs provenance
    meta : ndarray, optional,
        obs additional information
    """
    self.title = title
    if filename is not None:
        nc = seapy.netcdf(filename)
        # Construct an array from the data in the file. If obs_meta
        # exists in the file, then load it; otherwise, fill with zeros
        self.time = nc.variables["obs_time"][:]
        self.x = nc.variables["obs_Xgrid"][:]
        self.y = nc.variables["obs_Ygrid"][:]
        self.z = nc.variables["obs_Zgrid"][:]
        self.lat = nc.variables["obs_lat"][:]
        self.lon = nc.variables["obs_lon"][:]
        self.depth = nc.variables["obs_depth"][:]
        self.value = nc.variables["obs_value"][:]
        self.error = nc.variables["obs_error"][:]
        self.type = nc.variables["obs_type"][:]
        self.provenance = nc.variables["obs_provenance"][:]
        # Update the provenance definitions
        try:
            obs_provenance.update(dict(
                (int(k.strip()), v.strip())
                for v, k in (it.split(':')
                             for it in nc.obs_provenance.split(','))))
        except (AttributeError, ValueError):
            pass
        try:
            self.meta = nc.variables["obs_meta"][:]
        except KeyError:
            self.meta = np.zeros(self.value.size)
        finally:
            nc.close()
    else:
        if time is not None:
            self.time = np.atleast_1d(time)
        if x is not None:
            self.x = np.atleast_1d(x)
        if y is not None:
            self.y = np.atleast_1d(y)
        if z is not None:
            self.z = np.atleast_1d(z)
        if lat is not None:
            self.lat = np.atleast_1d(lat)
        if lon is not None:
            self.lon = np.atleast_1d(lon)
        if depth is not None:
            self.depth = np.atleast_1d(depth)
        if value is not None:
            self.value = np.atleast_1d(value)
        if error is not None:
            self.error = np.atleast_1d(error)
        if type is not None:
            self.type = astype(type)
        if provenance is not None:
            self.provenance = asprovenance(provenance)
        else:
            self.provenance = 0
        if meta is not None:
            self.meta = np.atleast_1d(meta)
    self._consistent()
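# Usage sketch for the obs class above: build a one-point observation set
# directly from arrays (values are illustrative; type 6 is temperature per
# the mapping in the docstring).
#
# >>> import numpy as np
# >>> o = obs(time=np.array([5432.5]), x=np.array([10.2]),
# ...         y=np.array([20.7]), z=np.array([-5.0]),
# ...         lat=np.array([23.4]), lon=np.array([-158.3]),
# ...         depth=np.array([-5.0]), value=np.array([25.1]),
# ...         error=np.array([0.04]), type=np.array([6]),
# ...         provenance=np.array([100]))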
import sys
import seapy
import numpy as np

try:
    infile = sys.argv[1]
    outfile = sys.argv[2]
except IndexError:
    print("Usage: {:s} input_file output_file".format(sys.argv[0]))
    sys.exit()

print("Convert {:s} to {:s}".format(infile, outfile))
maxrecs = 30

# Get the parameters
inc = seapy.netcdf(infile)
lat = len(inc.dimensions['lat'])
lon = len(inc.dimensions['lon'])
epoch, tvar = seapy.roms.get_reftime(inc)

# Create the new file
onc = seapy.roms.ncgen.create_frc_bulk(
    outfile, lat=lat, lon=lon, reftime=epoch, clobber=True)

# Save the times
onc.variables['time'][:] = inc.variables[tvar][:]
ntimes = len(onc.dimensions['time'])
onc.variables['lat'][:] = inc.variables['lat'][:]
onc.variables['lon'][:] = inc.variables['lon'][:]

# Copy the variables
def convert_file(self, file, title="Argo Obs"): """ Load an Argo file and convert into an obs structure """ nc = seapy.netcdf(file, aggdim="N_PROF") # Load the position of all profiles in the file lon = nc.variables["LONGITUDE"][:] lat = nc.variables["LATITUDE"][:] pro_q = nc.variables["POSITION_QC"][:].astype(int) # Find the profiles that are in our area with known locations quality if self.grid.east(): lon[lon < 0] += 360 profile_list = np.where(np.logical_and.reduce(( lat >= np.min(self.grid.lat_rho), lat <= np.max(self.grid.lat_rho), lon >= np.min(self.grid.lon_rho), lon <= np.max(self.grid.lon_rho), pro_q == 1)))[0] # Check which are good profiles profile_qc = nc.variables["PROFILE_PRES_QC"][ profile_list].astype('<U1') profile_list = profile_list[profile_qc == 'A'] if not profile_list.size: return None # Load only the data from those in our area julian_day = nc.variables["JULD_LOCATION"][profile_list] argo_epoch = datetime.datetime.strptime(''.join( nc.variables["REFERENCE_DATE_TIME"][:].astype('<U1')), '%Y%m%d%H%M%S') time_delta = (self.epoch - argo_epoch).days file_stamp = datetime.datetime.strptime(''.join( nc.variables["DATE_CREATION"][:].astype('<U1')), '%Y%m%d%H%M%S') # Grab data over the previous day file_time = np.minimum((file_stamp - argo_epoch).days, int(np.max(julian_day))) time_list = np.where(julian_day >= file_time - 1)[0] julian_day = julian_day[time_list] lon = lon[profile_list[time_list]] lat = lat[profile_list[time_list]] profile_list = profile_list[time_list] # Load the data in our region and time temp = nc.variables["TEMP"][profile_list, :] temp_qc = nc.variables["TEMP_QC"][profile_list, :] salt = nc.variables["PSAL"][profile_list, :] salt_qc = nc.variables["PSAL_QC"][profile_list, :] pres = nc.variables["PRES"][profile_list, :] pres_qc = nc.variables["PRES_QC"][profile_list, :] nc.close() # Ensure consistency full_mask = np.logical_or.reduce((temp.mask, salt.mask, pres.mask)) temp[full_mask] = np.ma.masked temp_qc[full_mask] = np.ma.masked salt[full_mask] = np.ma.masked salt_qc[full_mask] = np.ma.masked pres[full_mask] = np.ma.masked pres_qc[full_mask] = np.ma.masked # Combine the QC codes qc = np.mean(np.vstack((temp_qc.compressed(), salt_qc.compressed(), pres_qc.compressed())).astype(int), axis=0) good_data = np.where(qc == 1) # Put everything together into individual observations time = np.resize(julian_day - time_delta, pres.shape[::-1]).T[~temp.mask][good_data] lat = np.resize(lat, pres.shape[::-1]).T[~temp.mask][good_data] lon = np.resize(lon, pres.shape[::-1]).T[~temp.mask][good_data] depth = -seapy.seawater.depth(pres.compressed()[good_data], lat) # Apply the limits temp = np.ma.masked_outside(temp.compressed()[good_data], self.temp_limits[0], self.temp_limits[1]) salt = np.ma.masked_outside(salt.compressed()[good_data], self.salt_limits[0], self.salt_limits[1]) data = [seapy.roms.obs.raw_data("TEMP", "CTD_ARGO", temp, None, self.temp_error), seapy.roms.obs.raw_data("SALT", "CTD_ARGO", salt, None, self.salt_error)] return seapy.roms.obs.gridder(self.grid, time, lon, lat, depth, data, self.dt, title)
def to_clim(src_file, dest_file, dest_grid=None, records=None,
            clobber=False, cdl=None, threads=2, reftime=None,
            nx=0, ny=0, weight=10, vmap=None, pmap=None):
    """
    Given a model output file, create (if it does not exist) a new ROMS
    climatology file using the given ROMS destination grid and
    interpolate the ROMS fields onto the new grid. If an existing
    destination file is given, the fields are interpolated onto the
    specified grid.

    Parameters
    ----------
    src_file : string,
        Filename of src file to interpolate from
    dest_file : string,
        Name of destination file to write to
    dest_grid: (string or seapy.model.grid), optional:
        Name or instance of output definition
    records : numpy.ndarray, optional:
        Record indices to interpolate
    clobber: bool, optional
        If True, clobber any existing files and recreate. If False, use
        the existing file definition
    cdl: string, optional,
        Use the specified CDL file as the definition for the new
        netCDF file.
    threads : int, optional:
        number of processing threads
    reftime: datetime, optional:
        Reference time as the epoch for climatology file
    nx : float, optional:
        decorrelation length-scale for OA (same units as source data)
    ny : float, optional:
        decorrelation length-scale for OA (same units as source data)
    weight : int, optional:
        number of points to use in weighting matrix
    vmap : dictionary, optional
        mapping source and destination variables
    pmap : numpy.ndarray, optional:
        use the specified pmap rather than compute it

    Returns
    -------
    pmap : ndarray
        the weighting matrix computed during the interpolation
    """
    if dest_grid is not None:
        destg = seapy.model.asgrid(dest_grid)
        src_grid = seapy.model.asgrid(src_file)
        ncsrc = seapy.netcdf(src_file)
        src_ref, time = seapy.roms.get_reftime(ncsrc)
        if reftime is not None:
            src_ref = reftime
        records = np.arange(0, ncsrc.variables[time].shape[0]) \
            if records is None else np.atleast_1d(records)
        ncout = seapy.roms.ncgen.create_clim(dest_file,
                                             eta_rho=destg.ln,
                                             xi_rho=destg.lm,
                                             s_rho=destg.n,
                                             reftime=src_ref,
                                             clobber=clobber, cdl=cdl,
                                             title="interpolated from " +
                                                   src_file)
        src_time = netCDF4.num2date(ncsrc.variables[time][records],
                                    ncsrc.variables[time].units)
        ncout.variables["clim_time"][:] = netCDF4.date2num(
            src_time, ncout.variables["clim_time"].units)
        ncsrc.close()
    else:
        raise AttributeError(
            "you must supply a destination file or a grid to make the file")

    # Call the interpolation
    try:
        src_grid.set_east(destg.east())
        pmap = __interp_grids(src_grid, destg, ncout, records=records,
                              threads=threads, nx=nx, ny=ny, vmap=vmap,
                              weight=weight, pmap=pmap)
    except TimeoutError:
        print("Timeout: process is hung, deleting output.")
        # Delete the output file
        os.remove(dest_file)
    finally:
        # Clean up
        ncout.close()

    return pmap
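# Usage sketch for to_clim above (hypothetical file names): interpolate a
# parent-model average onto a child grid, then reuse the returned pmap to
# skip recomputing the weights on the next call.
#
# >>> pmap = to_clim("parent_avg.nc", "child_clim.nc",
# ...                dest_grid="child_grid.nc", nx=0.25, ny=0.25)
# >>> pmap = to_clim("parent_avg_2.nc", "child_clim_2.nc",
# ...                dest_grid="child_grid.nc", nx=0.25, ny=0.25, pmap=pmap)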
def gen_std_i(roms_file, std_file, std_window=5, pad=1, skip=30, fields=None): """ Create a std file for the given ocean fields. This std file can be used for initial conditions constraint in 4D-Var. This requires a long-term model spinup file from which to compute the standard deviation. Parameters ---------- roms_file: string or list of strings, The ROMS (history or average) file from which to compute the std. If it is a list of strings, a netCDF4.MFDataset is opened instead. std_file: string, The name of the file to store the standard deviation fields std_window: int, The size of the window (in number of records) to compute the std over pad: int, How much to pad each side of the window for overlap. For example, std_window=10 and pad=2 would give a total window of 14 with 2 records used in the prior window and 2 in the post window as well. skip: int, How many records to skip at the beginning of the file fields: list of str, The fields to compute std for. Default is to use the ROMS prognostic variables. Returns ------- None """ # Create the fields to process if fields is None: fields = set(seapy.roms.fields) # Open the ROMS info grid = seapy.model.asgrid(roms_file) nc = seapy.netcdf(roms_file) # Filter the fields for the ones in the ROMS file fields = set(nc.variables).intersection(fields) # Build the output file time_var = seapy.roms.get_timevar(nc) epoch = netCDF4.num2date(0, nc.variables[time_var].units) time = nc.variables[time_var][:] ncout = seapy.roms.ncgen.create_da_ini_std(std_file, eta_rho=grid.ln, xi_rho=grid.lm, s_rho=grid.n, reftime=epoch, title="std from " + str(roms_file)) grid.to_netcdf(ncout) # If there are any fields that are not in the standard output file, # add them to the output file for f in fields.difference(ncout.variables): ncout.createVariable(f, np.float32, ('ocean_time', "s_rho", "eta_rho", "xi_rho")) # Loop over the time with the variance window: for n, t in enumerate(seapy.progressbar.progress(np.arange(skip + pad, len(time) - std_window - pad, std_window))): idx = np.arange(t - pad, t + std_window + pad) ncout.variables[time_var][n] = np.mean(time[idx]) for v in fields: dat = nc.variables[v][idx, :].std(axis=0) # Zero out unrealistically large std values dat[dat > 10] = 0.0 ncout.variables[v][n, :] = dat ncout.sync() ncout.close() nc.close()
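The windowing arithmetic is the subtle part here: output record n is the std over std_window source records plus pad extra records on each side, starting after skip records. A small, runnable sketch that just prints the index windows for illustrative values:

import numpy as np

ntime, std_window, pad, skip = 100, 5, 1, 30  # illustrative values
for n, t in enumerate(np.arange(skip + pad, ntime - std_window - pad,
                                std_window)):
    idx = np.arange(t - pad, t + std_window + pad)
    # window n spans std_window + 2 * pad consecutive records
    print(n, idx[0], idx[-1])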
def to_zgrid(roms_file, z_file, z_grid=None, depth=None, records=None, threads=2, reftime=None, nx=0, ny=0, weight=10, vmap=None, cdl=None, dims=2, pmap=None): """ Given an existing ROMS history or average file, create (if it does not exist) a new z-grid file. Use the given z_grid or otherwise build one with the same horizontal extent and the specified depths and interpolate the ROMS fields onto the z-grid. Parameters ---------- roms_file : string, File name of src file to interpolate from z_file : string, Name of destination file to write to z_grid: (string or seapy.model.grid), optional: Name or instance of output definition depth: numpy.ndarray, optional: array of depths to use for z-level records : numpy.ndarray, optional: Record indices to interpolate threads : int, optional: number of processing threads reftime: datetime, optional: Reference time as the epoch for z-grid file nx : float, optional: decorrelation length-scale for OA (same units as source data) ny : float, optional: decorrelation length-scale for OA (same units as source data) weight : int, optional: number of points to use in weighting matrix vmap : dictionary, optional mapping source and destination variables cdl: string, optional, Use the specified CDL file as the definition for the new netCDF file. dims : int, optional number of dimensions to use for lat/lon arrays (default 2) pmap : numpy.ndarray, optional: use the specified pmap rather than compute it Returns ------- pmap : ndarray the weighting matrix computed during the interpolation """ roms_grid = seapy.model.asgrid(roms_file) ncroms = seapy.netcdf(roms_file) src_ref, time = seapy.roms.get_reftime(ncroms) if reftime is not None: src_ref = reftime records = np.arange(0, ncroms.variables[time].shape[0]) \ if records is None else np.atleast_1d(records) # Load the grid if z_grid is not None: z_grid = seapy.model.asgrid(z_grid) elif os.path.isfile(z_file): z_grid = seapy.model.asgrid(z_file) if not os.path.isfile(z_file): if z_grid is None: lat = roms_grid.lat_rho.shape[0] lon = roms_grid.lat_rho.shape[1] if depth is None: raise ValueError("depth must be specified") ncout = seapy.roms.ncgen.create_zlevel(z_file, lat, lon, len(depth), src_ref, "ROMS z-level", cdl=cdl, dims=dims) if dims == 1: ncout.variables["lat"][:] = roms_grid.lat_rho[:, 0] ncout.variables["lon"][:] = roms_grid.lon_rho[0, :] else: ncout.variables["lat"][:] = roms_grid.lat_rho ncout.variables["lon"][:] = roms_grid.lon_rho ncout.variables["depth"][:] = depth ncout.variables["mask"][:] = roms_grid.mask_rho ncout.sync() z_grid = seapy.model.grid(z_file) else: lat = z_grid.lat_rho.shape[0] lon = z_grid.lat_rho.shape[1] dims = z_grid.spatial_dims ncout = seapy.roms.ncgen.create_zlevel(z_file, lat, lon, len(z_grid.z), src_ref, "ROMS z-level", cdl=cdl, dims=dims) if dims == 1: ncout.variables["lat"][:] = z_grid.lat_rho[:, 0] ncout.variables["lon"][:] = z_grid.lon_rho[0, :] else: ncout.variables["lat"][:] = z_grid.lat_rho ncout.variables["lon"][:] = z_grid.lon_rho ncout.variables["depth"][:] = z_grid.z ncout.variables["mask"][:] = z_grid.mask_rho else: ncout = netCDF4.Dataset(z_file, "a") ncout.variables["time"][:] = netCDF4.date2num( netCDF4.num2date(ncroms.variables[time][records], ncroms.variables[time].units), ncout.variables["time"].units) ncroms.close() # Call the interpolation try: roms_grid.set_east(z_grid.east()) pmap = __interp_grids(roms_grid, z_grid, ncout, records=records, threads=threads, nx=nx, ny=ny, vmap=vmap, weight=weight, z_mask=True, pmap=pmap) except TimeoutError: print("Timeout: process is hung, deleting output.") # Delete the output file os.remove(z_file) finally: # Clean up ncout.close() return pmap
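A typical call when no z_grid exists yet supplies a depth array and lets the function create the z-level file. A hedged sketch (placeholder names; the import path assumes seapy.roms.interp, and the sign convention of depth should match your ncgen template):

import numpy as np
import seapy

# Placeholder z-levels for the output file
depth = np.array([5, 10, 25, 50, 100, 250, 500, 1000])
pmap = seapy.roms.interp.to_zgrid("ocean_his.nc", "ocean_zlev.nc",
                                  depth=depth, threads=4)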
def to_grid(src_file, dest_file, dest_grid=None, records=None, threads=2, reftime=None, nx=0, ny=0, weight=10, vmap=None, pmap=None): """ Given an existing model file, create (if it does not exist) a new ROMS history file using the given ROMS destination grid and interpolate the ROMS fields onto the new grid. If an existing destination file is given, it is interpolated onto the specified grid. Parameters ---------- src_file : string, Filename of src file to interpolate from dest_file : string, Name of destination file to write to dest_grid: (string or seapy.model.grid), optional: Name or instance of output definition records : numpy.ndarray, optional: Record indices to interpolate threads : int, optional: number of processing threads reftime: datetime, optional: Reference time as the epoch for ROMS file nx : float, optional: decorrelation length-scale for OA (same units as source data) ny : float, optional: decorrelation length-scale for OA (same units as source data) weight : int, optional: number of points to use in weighting matrix vmap : dictionary, optional mapping source and destination variables pmap : numpy.ndarray, optional: use the specified pmap rather than compute it Returns ------- pmap : ndarray the weighting matrix computed during the interpolation """ src_grid = seapy.model.asgrid(src_file) if dest_grid is not None: destg = seapy.model.asgrid(dest_grid) if not os.path.isfile(dest_file): ncsrc = seapy.netcdf(src_file) src_ref, time = seapy.roms.get_reftime(ncsrc) if reftime is not None: src_ref = reftime records = np.arange(0, ncsrc.variables[time].shape[0]) \ if records is None else np.atleast_1d(records) ncout = seapy.roms.ncgen.create_ini(dest_file, eta_rho=destg.eta_rho, xi_rho=destg.xi_rho, s_rho=destg.n, reftime=src_ref, title="interpolated from " + src_file) destg.to_netcdf(ncout) ncout.variables["ocean_time"][:] = netCDF4.date2num( netCDF4.num2date(ncsrc.variables[time][records], ncsrc.variables[time].units), ncout.variables["ocean_time"].units) ncsrc.close() if os.path.isfile(dest_file): ncout = netCDF4.Dataset(dest_file, "a") if dest_grid is None: destg = seapy.model.asgrid(dest_file) # Call the interpolation try: src_grid.set_east(destg.east()) pmap = __interp_grids(src_grid, destg, ncout, records=records, threads=threads, nx=nx, ny=ny, weight=weight, vmap=vmap, pmap=pmap) except TimeoutError: print("Timeout: process is hung, deleting output.") # Delete the output file os.remove(dest_file) finally: # Clean up ncout.close() return pmap
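Because the weighting matrix is returned, repeated interpolations between the same pair of grids can reuse it instead of recomputing it, which is usually the dominant cost. A sketch of that pattern with placeholder file names (import path as above):

import seapy

# First call computes (and caches) the weighting matrix
pmap = seapy.roms.interp.to_grid("his_0001.nc", "child_0001.nc",
                                 dest_grid="grid_child.nc")
# Later calls between the same two grids skip the weight computation
seapy.roms.interp.to_grid("his_0002.nc", "child_0002.nc",
                          dest_grid="grid_child.nc", pmap=pmap)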
def __interp_grids(src_grid, child_grid, ncout, records=None, threads=2, nx=0, ny=0, weight=10, vmap=None, z_mask=False, pmap=None): """ internal method: Given a model file (average, history, etc.), interpolate the fields onto another gridded file. Parameters ---------- src_grid : seapy.model.grid data source (History, Average, etc. file) child_grid : seapy.model.grid output data grid ncout : netcdf output file [records] : array of the record indices to interpolate [threads] : number of processing threads [nx] : decorrelation length in grid-cells for x [ny] : decorrelation length in grid-cells for y [vmap] : variable name mapping [z_mask] : mask out depths in z-grids [pmap] : use the specified pmap rather than compute it Returns ------- pmap : dict the pmap weighting matrices used for the interpolation """ # If we don't have a variable map, then do a one-to-one mapping if vmap is None: vmap = dict() for k in seapy.roms.fields: vmap[k] = k # Generate a file to store the pmap information sname = getattr(src_grid, 'name', None) cname = getattr(child_grid, 'name', None) pmap_file = None if any(v is None for v in (sname, cname)) else \ sname + "_" + cname + "_pmap.npz" # Create or load the pmaps depending on if they exist if nx == 0: if hasattr(src_grid, "dm") and hasattr(child_grid, "dm"): nx = np.ceil(np.mean(src_grid.dm) / np.mean(child_grid.dm)) else: nx = 5 if ny == 0: if hasattr(src_grid, "dn") and hasattr(child_grid, "dn"): ny = np.ceil(np.mean(src_grid.dn) / np.mean(child_grid.dn)) else: ny = 5 if pmap is None: if pmap_file is not None and os.path.isfile(pmap_file): pmap = np.load(pmap_file) else: tmp = np.ma.masked_equal(src_grid.mask_rho, 0) tmp, pmaprho = seapy.oasurf(src_grid.lon_rho, src_grid.lat_rho, tmp, child_grid.lon_rho, child_grid.lat_rho, weight=weight, nx=nx, ny=ny) tmp = np.ma.masked_equal(src_grid.mask_u, 0) tmp, pmapu = seapy.oasurf(src_grid.lon_u, src_grid.lat_u, tmp, child_grid.lon_rho, child_grid.lat_rho, weight=weight, nx=nx, ny=ny) tmp = np.ma.masked_equal(src_grid.mask_v, 0) tmp, pmapv = seapy.oasurf(src_grid.lon_v, src_grid.lat_v, tmp, child_grid.lon_rho, child_grid.lat_rho, weight=weight, nx=nx, ny=ny) if pmap_file is not None: np.savez(pmap_file, pmaprho=pmaprho, pmapu=pmapu, pmapv=pmapv) pmap = {"pmaprho": pmaprho, "pmapu": pmapu, "pmapv": pmapv} # Get the time field ncsrc = seapy.netcdf(src_grid.filename) time = seapy.roms.get_timevar(ncsrc) # Interpolate the depths from the source to final grid src_depth = np.min(src_grid.depth_rho, 0) dst_depth = __interp2_thread(src_grid.lon_rho, src_grid.lat_rho, src_depth, child_grid.lon_rho, child_grid.lat_rho, pmap["pmaprho"], weight, nx, ny, child_grid.mask_rho) # Interpolate the scalar fields records = np.arange(0, ncsrc.variables[time].shape[0]) \ if records is None else np.atleast_1d(records) for src in vmap: dest = vmap[src] # Extra fields will probably be user tracers (biogeochemical) fld = seapy.roms.fields.get(dest, {"dims": 3}) # Only interpolate the fields we want in the destination if (dest not in ncout.variables) or ("rotate" in fld): continue if fld["dims"] == 2: # Compute the max number of records to hold in memory maxrecs = np.maximum(1, np.minimum(len(records), int(_max_memory / (child_grid.lon_rho.nbytes + src_grid.lon_rho.nbytes)))) for rn, recs in enumerate(seapy.chunker(records, maxrecs)): outr = np.s_[rn * maxrecs:np.minimum((rn + 1) * maxrecs, len(records))] ndata = np.ma.array(Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory) (delayed(__interp2_thread)( src_grid.lon_rho, src_grid.lat_rho, ncsrc.variables[src][i, :, :], child_grid.lon_rho, child_grid.lat_rho, pmap["pmaprho"], weight, nx, ny, child_grid.mask_rho) for i in recs), copy=False) ncout.variables[dest][outr, :, :] = ndata ncout.sync() else: maxrecs = np.maximum(1, np.minimum( len(records), int(_max_memory / (child_grid.lon_rho.nbytes * child_grid.n + src_grid.lon_rho.nbytes * src_grid.n)))) for rn, recs in enumerate(seapy.chunker(records, maxrecs)): outr = np.s_[rn * maxrecs:np.minimum((rn + 1) * maxrecs, len(records))] ndata = np.ma.array(Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory) (delayed(__interp3_thread)( src_grid.lon_rho, src_grid.lat_rho, src_grid.depth_rho, ncsrc.variables[src][i, :, :, :], child_grid.lon_rho, child_grid.lat_rho, child_grid.depth_rho, pmap["pmaprho"], weight, nx, ny, child_grid.mask_rho, up_factor=_up_scaling.get(dest, 1.0), down_factor=_down_scaling.get(dest, 1.0)) for i in recs), copy=False) if z_mask: __mask_z_grid(ndata, dst_depth, child_grid.depth_rho) ncout.variables[dest][outr, :, :, :] = ndata ncout.sync() # Rotate and Interpolate the vector fields. First, determine which # are the "u" and the "v" vmap fields try: velmap = { "u": list(vmap.keys())[list(vmap.values()).index("u")], "v": list(vmap.keys())[list(vmap.values()).index("v")]} except ValueError: warn("velocity not present in source file") return pmap srcangle = src_grid.angle if src_grid.cgrid else None dstangle = child_grid.angle if child_grid.cgrid else None maxrecs = np.maximum(1, np.minimum(len(records), int(_max_memory / (2 * (child_grid.lon_rho.nbytes * child_grid.n + src_grid.lon_rho.nbytes * src_grid.n))))) for nr, recs in enumerate(seapy.chunker(records, maxrecs)): vel = Parallel(n_jobs=threads, verbose=2, max_nbytes=_max_memory)(delayed(__interp3_vel_thread)( src_grid.lon_rho, src_grid.lat_rho, src_grid.depth_rho, srcangle, ncsrc.variables[velmap["u"]][i, :, :, :], ncsrc.variables[velmap["v"]][i, :, :, :], child_grid.lon_rho, child_grid.lat_rho, child_grid.depth_rho, dstangle, pmap["pmaprho"], weight, nx, ny, child_grid.mask_rho) for i in recs) for j in range(len(vel)): vel_u = np.ma.array(vel[j][0], copy=False) vel_v = np.ma.array(vel[j][1], copy=False) if z_mask: __mask_z_grid(vel_u, dst_depth, child_grid.depth_rho) __mask_z_grid(vel_v, dst_depth, child_grid.depth_rho) if child_grid.cgrid: vel_u = seapy.model.rho2u(vel_u) vel_v = seapy.model.rho2v(vel_v) ncout.variables["u"][nr * maxrecs + j, :] = vel_u ncout.variables["v"][nr * maxrecs + j, :] = vel_v if "ubar" in ncout.variables: # Create ubar as the depth-weighted average of u ncout.variables["ubar"][nr * maxrecs + j, :] = \ np.sum(vel_u * child_grid.depth_u, axis=0) / \ np.sum(child_grid.depth_u, axis=0) if "vbar" in ncout.variables: # Create vbar as the depth-weighted average of v ncout.variables["vbar"][nr * maxrecs + j, :] = \ np.sum(vel_v * child_grid.depth_v, axis=0) / \ np.sum(child_grid.depth_v, axis=0) ncout.sync() # Return the pmap that was used return pmap
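The vmap dictionary keys are source-file variable names and the values are the destination ROMS field names; any entry whose destination is absent from the output file is skipped. This is how a non-ROMS source gets routed onto ROMS fields. A sketch with hypothetical source names (these happen to follow HYCOM conventions, but check your own file):

# Hypothetical source names mapped onto ROMS destination fields
vmap = {"water_temp": "temp",
        "salinity": "salt",
        "surf_el": "zeta",
        "water_u": "u",
        "water_v": "v"}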
import sys import seapy import numpy as np try: infile = sys.argv[1] outfile = sys.argv[2] except IndexError: print("Usage: {:s} input_file output_file".format(sys.argv[0])) sys.exit() print("Convert {:s} to {:s}".format(infile, outfile)) maxrecs = 30 # Get the parameters inc = seapy.netcdf(infile) eta_rho = len(inc.dimensions['eta_rho']) xi_rho = len(inc.dimensions['xi_rho']) s_rho = len(inc.dimensions['s_rho']) epoch, tvar = seapy.roms.get_reftime(inc) # Create the new file onc = seapy.roms.ncgen.create_clim( outfile, eta_rho=eta_rho, xi_rho=xi_rho, s_rho=s_rho, reftime=epoch, clobber=True) # Save the times onc.variables['clim_time'][:] = inc.variables[tvar][:] ntimes = len(onc.dimensions['clim_time']) # Copy the variables for v in seapy.roms.fields: if v in inc.variables: # Copy in chunks of maxrecs to bound memory use (a minimal completion sketch; assumes every field carries the clim_time dimension) for recs in seapy.chunker(np.arange(ntimes), maxrecs): onc.variables[v][recs, :] = inc.variables[v][recs, :] onc.sync() inc.close() onc.close()
def to_zgrid(roms_file, z_file, src_grid=None, z_grid=None, depth=None, records=None, threads=2, reftime=None, nx=0, ny=0, weight=10, vmap=None, cdl=None, dims=2, pmap=None): """ Given an existing ROMS history or average file, create (if it does not exist) a new z-grid file. Use the given z_grid or otherwise build one with the same horizontal extent and the specified depths and interpolate the ROMS fields onto the z-grid. Parameters ---------- roms_file : string, File name of src file to interpolate from z_file : string, Name of destination file to write to src_grid : (string or seapy.model.grid), optional: Name or instance of source grid. If nothing is specified, derives the grid from roms_file z_grid: (string or seapy.model.grid), optional: Name or instance of output definition depth: numpy.ndarray, optional: array of depths to use for z-level records : numpy.ndarray, optional: Record indices to interpolate threads : int, optional: number of processing threads reftime: datetime, optional: Reference time as the epoch for z-grid file nx : float, optional: decorrelation length-scale for OA (same units as source data) ny : float, optional: decorrelation length-scale for OA (same units as source data) weight : int, optional: number of points to use in weighting matrix vmap : dictionary, optional mapping source and destination variables cdl: string, optional, Use the specified CDL file as the definition for the new netCDF file. dims : int, optional number of dimensions to use for lat/lon arrays (default 2) pmap : numpy.ndarray, optional: use the specified pmap rather than compute it Returns ------- pmap : ndarray the weighting matrix computed during the interpolation """ if src_grid is None: src_grid = seapy.model.asgrid(roms_file) else: src_grid = seapy.model.asgrid(src_grid) ncsrc = seapy.netcdf(roms_file) src_ref, time = seapy.roms.get_reftime(ncsrc) if reftime is not None: src_ref = reftime records = np.arange(0, ncsrc.variables[time].shape[0]) \ if records is None else np.atleast_1d(records) # Load the grid if z_grid is not None: z_grid = seapy.model.asgrid(z_grid) elif os.path.isfile(z_file): z_grid = seapy.model.asgrid(z_file) if not os.path.isfile(z_file): if z_grid is None: lat = src_grid.lat_rho.shape[0] lon = src_grid.lat_rho.shape[1] if depth is None: raise ValueError("depth must be specified") ncout = seapy.roms.ncgen.create_zlevel(z_file, lat, lon, len(depth), src_ref, "ROMS z-level", cdl=cdl, dims=dims) if dims == 1: ncout.variables["lat"][:] = src_grid.lat_rho[:, 0] ncout.variables["lon"][:] = src_grid.lon_rho[0, :] else: ncout.variables["lat"][:] = src_grid.lat_rho ncout.variables["lon"][:] = src_grid.lon_rho ncout.variables["depth"][:] = depth ncout.variables["mask"][:] = src_grid.mask_rho ncout.sync() z_grid = seapy.model.grid(z_file) else: lat = z_grid.lat_rho.shape[0] lon = z_grid.lat_rho.shape[1] dims = z_grid.spatial_dims ncout = seapy.roms.ncgen.create_zlevel(z_file, lat, lon, len(z_grid.z), src_ref, "ROMS z-level", cdl=cdl, dims=dims) if dims == 1: ncout.variables["lat"][:] = z_grid.lat_rho[:, 0] ncout.variables["lon"][:] = z_grid.lon_rho[0, :] else: ncout.variables["lat"][:] = z_grid.lat_rho ncout.variables["lon"][:] = z_grid.lon_rho ncout.variables["depth"][:] = z_grid.z ncout.variables["mask"][:] = z_grid.mask_rho else: ncout = netCDF4.Dataset(z_file, "a") ncout.variables["time"][:] = seapy.roms.date2num( seapy.roms.num2date(ncsrc, time, records), ncout, "time") # Call the interpolation try: src_grid.set_east(z_grid.east()) pmap = __interp_grids(src_grid, z_grid, ncsrc,
ncout, records=records, threads=threads, nx=nx, ny=ny, vmap=vmap, weight=weight, z_mask=True, pmap=pmap) except TimeoutError: print("Timeout: process is hung, deleting output.") # Delete the output file os.remove(z_file) finally: # Clean up ncsrc.close() ncout.close() return pmap
def merge_files(obs_files, out_files, days, dt, limits=None, clobber=True): """ Merge together a group of observation files into combined new files with observations that lie only within the corresponding dates Parameters ---------- obs_files : list, List of files to merge together (a single file will work, it will just be filtered by the dates) out_files : list or string, list of the filenames to create for each of the output periods. If a single string is given, the character '#' will be replaced by the starting time of the observation (e.g. out_files="out_#.nc" will become out_03234.nc) days : list of tuples, List of starting and ending day numbers for each cycle to process. The first value is the start day, the second is the end day. The number of tuples is the number of files to output. dt : float, Time separation of observations. Observations that are less than dt apart in time will be set to the same time. limits : dict, optional Set the limits of the grid points that observations are allowed within, {'north':i, 'south':i, 'east':i, 'west':i }. Since obs near the boundaries are not advisable, this allows you to specify the valid range of grid points within which to accept obs. clobber: bool, optional If True, output files are overwritten. If False, they are skipped. Returns ------- None Examples -------- Put together three files into 5 separate files in two day intervals from day 10 through day 20: >>> merge_files(["obs_1.nc", "obs_2.nc", "obs_3.nc"], "new_#.nc", [(i, i+2) for i in range(10, 20, 2)], dt=0.25) Put together the same three files into 3 overlapping separate files in five day intervals with one overlapping day: >>> merge_files(["obs_1.nc", "obs_2.nc", "obs_3.nc"], "new_#.nc", [(i, i+5) for i in range(10, 20, 4)], dt=0.25) """ import re import os # Only unique files obs_files = set().union(seapy.flatten(obs_files)) outtime = False if isinstance(out_files, str): outtime = True time = re.compile('#') # Go through the files to determine which periods they cover myobs = list() sdays = list() edays = list() for file in obs_files: nc = seapy.netcdf(file) fdays = nc.variables['survey_time'][:] nc.close() l = np.where(np.logical_and(fdays >= np.min(days), fdays <= np.max(days)))[0] if not l.size: continue myobs.append(file) sdays.append(fdays[0]) edays.append(fdays[-1]) sdays = np.asarray(sdays) edays = np.asarray(edays) # Loop over the dates in pairs for n, t in enumerate(seapy.progressbar.progress(days)): # Set output file name if outtime: outfile = time.sub("{:05d}".format(t[0]), out_files) else: outfile = out_files[n] if os.path.exists(outfile) and not clobber: continue # Find the files that cover the current period fidx = np.where(np.logical_and(sdays <= t[1], edays >= t[0]))[0] if not fidx.size: continue # Create new observations for this time period nobs = obs(myobs[fidx[0]]) l = np.where(np.logical_or(nobs.time < t[0], nobs.time > t[1])) nobs.delete(l) for idx in fidx[1:]: o = obs(myobs[idx]) l = np.where(np.logical_and(o.time >= t[0], o.time <= t[1])) nobs.add(o[l]) # Apply the spatial limits, removing obs outside the valid range if limits is not None: l = np.where(np.logical_or.reduce(( nobs.x < limits['west'], nobs.x > limits['east'], nobs.y < limits['south'], nobs.y > limits['north']))) nobs.delete(l) # Save out the new observations nobs.to_netcdf(outfile, dt=dt)
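Note that limits is given in grid-point indices, not degrees. A hedged example that keeps only obs at least 5 points away from every boundary of a hypothetical 200 x 300 grid (assuming merge_files is imported from its module, e.g. seapy.roms.obs):

merge_files(["obs_1.nc", "obs_2.nc"], "week_#.nc",
            [(i, i + 7) for i in range(0, 28, 7)], dt=0.25,
            limits={'west': 5, 'east': 295, 'south': 5, 'north': 195})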
def gen_std_f(roms_file, std_file, records=None, fields=None): """ Create a std file for the given atmospheric forcing fields. This std file can be used for the forcing constraint in 4D-Var. This requires a long-term model spinup file from which to compute the standard deviation. Parameters ---------- roms_file: string or list of strings, The ROMS (history or average) file from which to compute the std. If it is a list of strings, a netCDF4.MFDataset is opened instead. std_file: string, The name of the file to store the standard deviation fields records: ndarray, List of records to perform the std over. These records are used to avoid the solar diurnal cycles in the fields. fields: list of str, The fields to compute std for. Default is to use the ROMS atmospheric variables (sustr, svstr, shflux, ssflux). Returns ------- None """ # Create the fields to process if fields is None: fields = set(["sustr", "svstr", "shflux", "ssflux"]) # Open the ROMS info grid = seapy.model.asgrid(roms_file) nc = seapy.netcdf(roms_file) # Filter the fields for the ones in the ROMS file fields = set(nc.variables).intersection(fields) # Build the output file time_var = seapy.roms.get_timevar(nc) epoch = netCDF4.num2date(0, nc.variables[time_var].units) time = nc.variables[time_var][:] ncout = seapy.roms.ncgen.create_da_frc_std(std_file, eta_rho=grid.ln, xi_rho=grid.lm, s_rho=grid.n, reftime=epoch, title="std from " + str(roms_file)) grid.to_netcdf(ncout) # Set the records if records is None: records = np.arange(len(time)) else: records = np.atleast_1d(records) records = records[records < len(time)] # If there are any fields that are not part of the standard, add them # to the output file for f in fields.difference(ncout.variables): ncout.createVariable(f, np.float32, ('ocean_time', "eta_rho", "xi_rho")) # Compute the std over the selected records ncout.variables[time_var][:] = np.mean(time[records]) for v in fields: dat = nc.variables[v][records, :].std(axis=0) ncout.variables[v][0, :] = dat ncout.sync() ncout.close() nc.close()
def to_clim(src_file, dest_file, src_grid=None, dest_grid=None, records=None, clobber=False, cdl=None, threads=2, reftime=None, nx=0, ny=0, weight=10, vmap=None, pmap=None): """ Given a model output file, create (if it does not exist) a new ROMS climatology file using the given ROMS destination grid and interpolate the ROMS fields onto the new grid. If an existing destination file is given, it is interpolated onto the specified grid. Parameters ---------- src_file : string, Filename of src file to interpolate from dest_file : string, Name of destination file to write to src_grid : (string or seapy.model.grid), optional: Name or instance of source grid. If nothing is specified, derives the grid from src_file dest_grid: (string or seapy.model.grid), optional: Name or instance of output definition records : numpy.ndarray, optional: Record indices to interpolate clobber: bool, optional If True, clobber any existing files and recreate. If False, use the existing file definition cdl: string, optional, Use the specified CDL file as the definition for the new netCDF file. threads : int, optional: number of processing threads reftime: datetime, optional: Reference time as the epoch for climatology file nx : float, optional: decorrelation length-scale for OA (same units as source data) ny : float, optional: decorrelation length-scale for OA (same units as source data) weight : int, optional: number of points to use in weighting matrix vmap : dictionary, optional mapping source and destination variables pmap : numpy.ndarray, optional: use the specified pmap rather than compute it Returns ------- pmap : ndarray the weighting matrix computed during the interpolation """ if dest_grid is not None: destg = seapy.model.asgrid(dest_grid) if src_grid is None: src_grid = seapy.model.asgrid(src_file) else: src_grid = seapy.model.asgrid(src_grid) ncsrc = seapy.netcdf(src_file) src_ref, time = seapy.roms.get_reftime(ncsrc) if reftime is not None: src_ref = reftime records = np.arange(0, ncsrc.variables[time].shape[0]) \ if records is None else np.atleast_1d(records) ncout = seapy.roms.ncgen.create_clim(dest_file, eta_rho=destg.ln, xi_rho=destg.lm, s_rho=destg.n, reftime=src_ref, clobber=clobber, cdl=cdl, title="interpolated from " + src_file) src_time = seapy.roms.num2date(ncsrc, time, records) ncout.variables["clim_time"][:] = seapy.roms.date2num( src_time, ncout, "clim_time") else: raise AttributeError( "you must supply a destination file or a grid to make the file") # Call the interpolation try: src_grid.set_east(destg.east()) pmap = __interp_grids(src_grid, destg, ncsrc, ncout, records=records, threads=threads, nx=nx, ny=ny, vmap=vmap, weight=weight, pmap=pmap) except TimeoutError: print("Timeout: process is hung, deleting output.") # Delete the output file os.remove(dest_file) finally: # Clean up ncsrc.close() ncout.close() return pmap
def gen_std_i(roms_file, std_file, std_window=5, pad=1, skip=30, fields=None): """ Create a std file for the given ocean fields. This std file can be used for initial conditions constraint in 4D-Var. This requires a long-term model spinup file from which to compute the standard deviation. Parameters ---------- roms_file: string or list of strings, The ROMS (history or average) file from which to compute the std. If it is a list of strings, a netCDF4.MFDataset is opened instead. std_file: string, The name of the file to store the standard deviation fields std_window: int, The size of the window (in number of records) to compute the std over pad: int, How much to pad each side of the window for overlap. For example, std_window=10 and pad=2 would give a total window of 14 with 2 records used in the prior window and 2 in the post window as well. skip: int, How many records to skip at the beginning of the file fields: list of str, The fields to compute std for. Default is to use the ROMS prognostic variables. Returns ------- None """ # Create the fields to process if fields is None: fields = set(seapy.roms.fields) # Open the ROMS info grid = seapy.model.asgrid(roms_file) nc = seapy.netcdf(roms_file) # Filter the fields for the ones in the ROMS file fields = set(nc.variables).intersection(fields) # Build the output file epoch, time_var = seapy.roms.get_reftime(nc) time = nc.variables[time_var][:] ncout = seapy.roms.ncgen.create_da_ini_std(std_file, eta_rho=grid.ln, xi_rho=grid.lm, s_rho=grid.n, reftime=epoch, title="std from " + str(roms_file)) grid.to_netcdf(ncout) # If there are any fields that are not in the standard output file, # add them to the output file for f in fields.difference(ncout.variables): ncout.createVariable(f, np.float32, ('ocean_time', "s_rho", "eta_rho", "xi_rho")) # Loop over the time with the variance window: for n, t in enumerate( seapy.progressbar.progress( np.arange(skip + pad, len(time) - std_window - pad, std_window))): idx = np.arange(t - pad, t + std_window + pad) ncout.variables[time_var][n] = np.mean(time[idx]) for v in fields: dat = nc.variables[v][idx, :].std(axis=0) # Zero out unrealistically large std values dat[dat > 10] = 0.0 ncout.variables[v][n, :] = dat ncout.sync() ncout.close() nc.close()
def _initfile(self): """ Using an input file, try to load as much information as can be found in the given file. Parameters ---------- None Returns ------- None : sets attributes in grid """ # Define a dictionary to go through and convert netcdf variables # to internal class attributes gvars = { "lat_rho": ["lat_rho", "lat", "latitude", "y_rho", "geolat_t"], "lon_rho": ["lon_rho", "lon", "longitude", "x_rho", "geolon_t"], "lat_u": ["lat_u", "y_u", "geolat_u"], "lon_u": ["lon_u", "x_u", "geolon_u"], "lat_v": ["lat_v", "y_v", "geolat_v"], "lon_v": ["lon_v", "x_v", "geolon_v"], "mask_rho": ["mask_rho", "mask"], "mask_u": ["mask_u"], "mask_v": ["mask_v"], "angle": ["angle"], "h": ["h"], "n": ["n"], "theta_s": ["theta_s"], "theta_b": ["theta_b"], "tcline": ["tcline"], "hc": ["hc"], "vtransform": ["vtransform"], "vstretching": ["vstretching"], "s_rho": ["s_rho"], "cs_r": ["cs_r"], "f": ["f"], "pm": ["pm"], "pn": ["pn"], "z": ["z", "depth", "lev", "st_ocean"], "wtype_grid": ["mask_rho"], "rdrag": ["rdrag"], "rdrag2": ["rdrag2"], "diff_factor": ["diff_factor"], "visc_factor": ["visc_factor"] } # Open the file close = False if self._nc is None: close = True self._nc = seapy.netcdf(self.filename) try: self.name = re.search(r"[^\.]*", os.path.basename(self.filename)).group() except AttributeError: self.name = "untitled" self.key = {} ncvars = {v.lower(): v for v in self._nc.variables.keys()} for var in gvars: for inp in gvars[var]: if inp in ncvars: self.key[var] = inp self.__dict__[var] = self._nc.variables[ncvars[inp]][:] break if close: # Close the file self._nc.close() self._nc = None
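The lowercase ncvars mapping is what makes the lookup case-insensitive: candidates in gvars are stored lowercase, while ncvars remembers the original spelling for indexing the file. A stripped-down, runnable sketch of the same pattern:

# Hypothetical netCDF variable names with mixed case
nc_names = ["Lon_Rho", "Lat_Rho", "H", "N"]
ncvars = {v.lower(): v for v in nc_names}

gvars = {"lon_rho": ["lon_rho", "lon", "longitude"],
         "h": ["h"],
         "n": ["n"]}
key = {}
for var, candidates in gvars.items():
    for inp in candidates:
        if inp in ncvars:
            key[var] = inp        # lowercase candidate that matched
            actual = ncvars[inp]  # original spelling, used to read data
            break
print(key)  # {'lon_rho': 'lon_rho', 'h': 'h', 'n': 'n'}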
def gen_std_f(roms_file, std_file, records=None, fields=None): """ Create a std file for the given atmospheric forcing fields. This std file can be used for the forcing constraint in 4D-Var. This requires a long-term model spinup file from which to compute the standard deviation. Parameters ---------- roms_file: string or list of strings, The ROMS (history or average) file from which to compute the std. If it is a list of strings, a netCDF4.MFDataset is opened instead. std_file: string, The name of the file to store the standard deviation fields records: ndarray, List of records to perform the std over. These records are used to avoid the solar diurnal cycles in the fields. fields: list of str, The fields to compute std for. Default is to use the ROMS atmospheric variables (sustr, svstr, shflux, ssflux). Returns ------- None """ # Create the fields to process if fields is None: fields = set(["sustr", "svstr", "shflux", "ssflux"]) # Open the ROMS info grid = seapy.model.asgrid(roms_file) nc = seapy.netcdf(roms_file) # Filter the fields for the ones in the ROMS file fields = set(nc.variables).intersection(fields) # Build the output file epoch, time_var = seapy.roms.get_reftime(nc) time = nc.variables[time_var][:] ncout = seapy.roms.ncgen.create_da_frc_std(std_file, eta_rho=grid.ln, xi_rho=grid.lm, s_rho=grid.n, reftime=epoch, title="std from " + str(roms_file)) grid.to_netcdf(ncout) # Set the records if records is None: records = np.arange(len(time)) else: records = np.atleast_1d(records) records = records[records < len(time)] # If there are any fields that are not part of the standard, add them # to the output file for f in fields.difference(ncout.variables): ncout.createVariable(f, np.float32, ('ocean_time', "eta_rho", "xi_rho")) # Compute the std over the selected records ncout.variables[time_var][:] = np.mean(time[records]) for v in fields: dat = nc.variables[v][records, :].std(axis=0) ncout.variables[v][0, :] = dat ncout.sync() ncout.close() nc.close()
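Since the records argument exists to keep the solar diurnal cycle out of the forcing std, it is typically chosen to sample the same time of day. For example, with 6-hourly output, every fourth record does that; a sketch using the function above (file names and stride are placeholders and depend on your output interval):

import numpy as np

# Hypothetical 6-hourly spinup: one record per day at the same hour
records = np.arange(0, 365 * 4, 4)
gen_std_f("spinup_avg.nc", "std_frc.nc", records=records)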
def gen_direct_forcing(his_file, frc_file, cdl=None): """ Generate a direct forcing file from a history (or other ROMS output) file. It requires that sustr, svstr, shflux, and ssflux (or swflux or EminusP), along with salt, be available. This will generate a forcing file that contains: sustr, svstr, shflux, swrad, swflux, and SSS. Parameters ---------- his_file: string, The ROMS history (or other) file(s) (can use wildcards) that contains the fields to make forcing from frc_file: string, The output forcing file cdl: string, optional, Use the specified CDL file as the definition for the new netCDF file. Returns ------- None: Generates an output file of bulk forcings """ import os infile = seapy.netcdf(his_file) ref, _ = seapy.roms.get_reftime(infile) # Create the output file nc = seapy.roms.ncgen.create_frc_direct(frc_file, eta_rho=infile.dimensions['eta_rho'].size, xi_rho=infile.dimensions['xi_rho'].size, reftime=ref, clobber=True, title="Forcing from " + os.path.basename(his_file), cdl=cdl) # Copy the data over time = seapy.roms.num2date(infile, 'ocean_time') nc.variables['frc_time'][:] = seapy.roms.date2num(time, nc, 'frc_time') for x in seapy.progressbar.progress(seapy.chunker(range(len(time)), 1000)): nc.variables['SSS'][x, :, :] = seapy.convolve_mask( infile.variables['salt'][x, -1, :, :], copy=False) if 'EminusP' in infile.variables: # The factor 86400 converts a per-second flux to per-day nc.variables['swflux'][x, :, :] = seapy.convolve_mask( infile.variables['EminusP'][x, :, :], copy=False) * 86400 elif 'swflux' in infile.variables: nc.variables['swflux'][x, :, :] = seapy.convolve_mask( infile.variables['swflux'][x, :, :], copy=False) else: # Derive swflux from the salt flux and the sea-surface salinity nc.variables['swflux'][x, :, :] = seapy.convolve_mask( infile.variables['ssflux'][x, :, :] / nc.variables['SSS'][x, :, :], copy=False) nc.sync() for f in ("sustr", "svstr", "shflux", "swrad"): if f in infile.variables: nc.variables[f][x, :, :] = seapy.convolve_mask( infile.variables[f][x, :, :], copy=False) nc.sync() for f in ("lat_rho", "lat_u", "lat_v", "lon_rho", "lon_u", "lon_v"): if f in infile.variables: nc.variables[f][:] = infile.variables[f][:] nc.close()
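A typical invocation only names the source and output files; wildcards are allowed in the history file name. Note the design choice in the body above: SSS is written before swflux so that the ssflux fallback can divide the salt flux by the sea-surface salinity. Placeholder names below:

# Build a direct forcing file from a run's history output
gen_direct_forcing("ocean_his_*.nc", "frc_direct.nc")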