def _extrap_model(config, modelFolder):
    """
    Run the full extrapolation pipeline for one model: horizontal
    extrapolation within basins, vertical extrapolation, remapping to the
    final vertical grid, and extrapolation into grounded regions above
    sea level.
    """
    resExtrap = get_res(config, extrap=True)
    resFinal = get_res(config, extrap=False)
    hres = get_horiz_res(config)
    modelName = config.get('model', 'name')

    fields = ('temperature', 'salinity')

    bedMaskFileName = f'{modelFolder}/bed_mask_{resExtrap}.nc'
    bedFileName = f'bedmap2/bedmap2_{hres}.nc'
    basinNumberFileName = f'imbie/basinNumbers_{hres}.nc'

    # the temperature file supplies the grid used to build the 3D bed mask
    make_3D_bed_mask(f'{modelFolder}/{modelName}_temperature_{resExtrap}.nc',
                     bedMaskFileName, bedFileName)

    # horizontal extrapolation, one field at a time
    for fieldName in fields:
        extrap_horiz(
            config,
            f'{modelFolder}/{modelName}_{fieldName}_{resExtrap}.nc',
            f'{modelFolder}/{modelName}_{fieldName}_{resExtrap}'
            f'_extrap_horiz.nc',
            fieldName, bedFileName, basinNumberFileName, bedMaskFileName,
            f'{modelFolder}/progress_{fieldName}',
            f'{modelName.lower()}/matrices')

    # vertical extrapolation of the horizontally extrapolated fields
    for fieldName in fields:
        extrap_vert(
            config,
            f'{modelFolder}/{modelName}_{fieldName}_{resExtrap}'
            f'_extrap_horiz.nc',
            f'{modelFolder}/{modelName}_{fieldName}_{resExtrap}'
            f'_extrap_vert.nc',
            fieldName)

    # remap both fields from the extrapolation grid to the final grid
    inFileNames = {
        fieldName:
            f'{modelFolder}/{modelName}_{fieldName}_{resExtrap}'
            f'_extrap_vert.nc'
        for fieldName in fields}
    outFileNames = {
        fieldName:
            f'{modelFolder}/{modelName}_{fieldName}_{resFinal}'
            f'_extrap_vert.nc'
        for fieldName in fields}
    remap_vertical(config, inFileNames, outFileNames, extrap=False)

    # finally, fill grounded regions above sea level
    for fieldName in fields:
        extrap_grounded_above_sea_level(
            config,
            f'{modelFolder}/{modelName}_{fieldName}_{resFinal}'
            f'_extrap_vert.nc',
            f'{modelFolder}/{modelName}_{fieldName}_{resFinal}.nc',
            fieldName,
            f'{modelFolder}/progress_{fieldName}',
            f'{modelName.lower()}/matrices')
def _compute_climatology(config):
    """
    Average the combined model fields over the present-day time window.

    The window is the inclusive index range ``[firstTIndex, lastTIndex]``
    from the ``climatology`` config section.  Existing outputs are skipped.
    """
    resFinal = get_res(config, extrap=False)
    modelName = config.get('model', 'name')
    firstTIndex = config.getint('climatology', 'firstTIndex')
    lastTIndex = config.getint('climatology', 'lastTIndex')
    baseFolder = modelName.lower()
    inFolder = f"{baseFolder}/{config.get('climatology', 'outFolder')}"
    outFolder = f"{baseFolder}/{config.get('climatology', 'folder')}"
    try:
        os.makedirs(outFolder)
    except OSError:
        # directory already exists
        pass
    print(' Computing present-day climatology...')
    for fieldName in ('temperature', 'salinity'):
        inFileName = f'{inFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        outFileName = f'{outFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        if os.path.exists(outFileName):
            print(f' {outFileName} already exists.')
            continue
        print(f' {outFileName}')
        dsField = xarray.open_dataset(inFileName)
        dsField = dsField.isel(time=slice(firstTIndex, lastTIndex + 1))
        with warnings.catch_warnings():
            # all-NaN columns would otherwise trigger RuntimeWarnings
            warnings.simplefilter("ignore", category=RuntimeWarning)
            dsField = dsField.mean(dim='time')
        dsField.to_netcdf(outFileName)
def _add_anomaly_to_obs(config):
    """
    Add the 1995-2017 observational climatology to the model anomaly,
    producing bias-corrected temperature and salinity fields.
    """
    resFinal = get_res(config, extrap=False)
    modelName = config.get('model', 'name')
    baseFolder = modelName.lower()
    inFolder = f"{baseFolder}/{config.get('anomaly', 'folder')}"
    outFolder = f"{baseFolder}/{config.get('anomaly', 'obsFolder')}"
    try:
        os.makedirs(outFolder)
    except OSError:
        # directory already exists
        pass
    print(' Adding observational climatology to the anomaly...')
    for fieldName in ('temperature', 'salinity'):
        obsFileName = f'obs/obs_{fieldName}_1995-2017_{resFinal}.nc'
        inFileName = f'{inFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        outFileName = f'{outFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        if os.path.exists(outFileName):
            print(f' {outFileName} already exists.')
            continue
        print(f' {outFileName}')
        dsAnomaly = xarray.open_dataset(inFileName)
        dsObs = xarray.open_dataset(obsFileName)
        # arithmetic drops variable attributes; save and restore them
        attrs = dsAnomaly[fieldName].attrs
        dsAnomaly[fieldName] = dsAnomaly[fieldName] + dsObs[fieldName]
        dsAnomaly[fieldName].attrs = attrs
        dsAnomaly.to_netcdf(outFileName)
def _compute_anomaly(config):
    """
    Subtract the present-day model climatology from the combined model
    output to produce temperature and salinity anomalies.
    """
    resFinal = get_res(config, extrap=False)
    modelName = config.get('model', 'name')
    baseFolder = modelName.lower()
    inFolder = f"{baseFolder}/{config.get('combine', 'outFolder')}"
    climFolder = f"{baseFolder}/{config.get('climatology', 'folder')}"
    outFolder = f"{baseFolder}/{config.get('anomaly', 'folder')}"
    try:
        os.makedirs(outFolder)
    except OSError:
        # directory already exists
        pass
    print(' Computing anomaly from present-day...')
    for fieldName in ('temperature', 'salinity'):
        inFileName = f'{inFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        climFileName = f'{climFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        outFileName = f'{outFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        if os.path.exists(outFileName):
            print(f' {outFileName} already exists.')
            continue
        print(f' {outFileName}')
        dsModel = xarray.open_dataset(inFileName)
        dsClim = xarray.open_dataset(climFileName)
        # arithmetic drops variable attributes; save and restore them
        attrs = dsModel[fieldName].attrs
        dsModel[fieldName] = dsModel[fieldName] - dsClim[fieldName]
        dsModel[fieldName].attrs = attrs
        dsModel.to_netcdf(outFileName)
def _combine_obs(config, decades):
    """
    Blend WOA, MEOP and EN4 fields into one observational product by
    averaging the data sets wherever each has valid (finite) values.
    """
    print(' Combining observations...')
    res = get_res(config, extrap=True)
    for fieldName in ('temperature', 'salinity'):
        outFileName = f'obs/obs_{fieldName}_{decades}_{res}.nc'
        if os.path.exists(outFileName):
            continue
        datasets = {
            'woa': xarray.open_dataset(
                f'woa/woa_{fieldName}_{decades}_{res}.nc'),
            'meop': xarray.open_dataset(
                f'meop/meop_{fieldName}_{res}.nc'),
            'en4': xarray.open_dataset(
                f'en4/en4_{fieldName}_{decades}_{res}.nc')}
        # the EN4 file supplies the grid; drop its data variable
        ds = datasets['en4'].drop(fieldName)
        shape = (ds.sizes['z'], ds.sizes['y'], ds.sizes['x'])
        total = numpy.zeros(shape)
        count = numpy.zeros(shape)
        # accumulate sums and per-cell counts over all data sets
        for dsLocal in datasets.values():
            values = dsLocal[fieldName].values
            valid = numpy.isfinite(values)
            total[valid] += values[valid]
            count[valid] += 1.
        covered = count > 0
        total[covered] /= count[covered]
        total[~covered] = numpy.nan
        ds[fieldName] = (('z', 'y', 'x'), total)
        ds[fieldName].attrs = datasets['en4'][fieldName].attrs
        ds.to_netcdf(outFileName)
    print(' Done.')
def _combine_model_output(config, section):
    """
    Concatenate per-folder model output files into a single data set per
    field.

    ``section`` names the config section that provides the comma-separated
    input folders, the concatenation dimension and the output folder.
    """
    resFinal = get_res(config, extrap=False)
    modelName = config.get('model', 'name')
    combineDim = config.get(section, 'dim')
    baseFolder = modelName.lower()
    outFolder = f"{baseFolder}/{config.get(section, 'outFolder')}"
    folders = [f'{baseFolder}/{folder.strip()}'
               for folder in config.get(section, 'folders').split(',')]
    try:
        os.makedirs(outFolder)
    except OSError:
        # directory already exists
        pass
    print(' Combining model results into a single data set...')
    for fieldName in ('temperature', 'salinity'):
        outFileName = f'{outFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        if os.path.exists(outFileName):
            print(f' {outFileName} already exists.')
            continue
        print(f' {outFileName}')
        fileNames = [f'{folder}/{modelName}_{fieldName}_{resFinal}.nc'
                     for folder in folders]
        ds = xarray.open_mfdataset(fileNames, concat_dim=combineDim)
        for coord in ('lat', 'lon'):
            # lat/lon should not vary in time; keep a single snapshot
            if coord in ds.coords and 'time' in ds.coords[coord].dims:
                ds.coords[coord] = ds.coords[coord].isel(time=0, drop=True)
        ds.to_netcdf(outFileName)
def process_en4(config, startYear, endYear):
    '''
    Download EN4 temperature and salinity, and bin them on the ISMIP6 grid

    Profiles are downloaded as yearly zip files, decompressed, binned into
    salinity and potential-temperature data sets, and the potential
    temperature is converted to in-situ temperature using the binned
    salinity.  Steps whose outputs already exist are skipped.
    '''
    try:
        os.makedirs('en4/zips')
    except OSError:
        # directory already exists
        pass

    print('Processing UK Met Office EN4...')

    # download and decompress only if the profiles have not been
    # extracted yet
    if not os.path.exists('en4/profiles'):
        baseURL = 'https://www.metoffice.gov.uk/hadobs/en4/data/en4-2-1/'
        fileNames = [
            'EN.4.2.1.profiles.g10.{}.zip'.format(year)
            for year in range(startYear, endYear + 1)
        ]
        download_files(fileNames, baseURL, 'en4/zips')
        try:
            os.makedirs('en4/profiles')
        except OSError:
            pass
        print(' Decompressing EN4 data...')
        widgets = [
            ' ', progressbar.Percentage(),
            ' ', progressbar.Bar(),
            ' ', progressbar.ETA()
        ]
        bar = progressbar.ProgressBar(widgets=widgets,
                                      maxval=len(fileNames)).start()
        for index, fileName in enumerate(fileNames):
            with zipfile.ZipFile('en4/zips/{}'.format(fileName), 'r') as f:
                f.extractall('en4/profiles')
            bar.update(index + 1)
        bar.finish()

    res = get_res(config)

    tempFileName = 'en4/en4_temperature_{}-{}_{}.nc'.format(
        startYear, endYear, res)
    salinFileName = 'en4/en4_salinity_{}-{}_{}.nc'.format(
        startYear, endYear, res)

    # salinity must be binned first: it is needed below to convert
    # potential temperature to in-situ temperature
    if not os.path.exists(salinFileName):
        dsSalinity = _bin_en4(config, 'PSAL', 'salinity', startYear, endYear)
        dsSalinity.to_netcdf(salinFileName)
        dsSalinity.close()
    if not os.path.exists(tempFileName):
        dsSalinity = xarray.open_dataset(salinFileName)
        dsPotTemp = _bin_en4(config, 'POTM', 'temperature', startYear,
                             endYear)
        dsTemp = potential_to_in_situ_temperature(dsPotTemp, dsSalinity)
        dsTemp.to_netcdf(tempFileName)
    print('Done.')
def _compute_thermal_driving(config):
    """Compute thermal forcing from the bias-corrected T and S fields."""
    resFinal = get_res(config, extrap=False)
    modelName = config.get('model', 'name')
    subfolder = config.get('anomaly', 'obsFolder')
    modelFolder = f'{modelName.lower()}/{subfolder}'
    prefix = f'{modelFolder}/{modelName}'
    compute_thermal_forcing(f'{prefix}_temperature_{resFinal}.nc',
                            f'{prefix}_salinity_{resFinal}.nc',
                            f'{prefix}_thermal_forcing_{resFinal}.nc')
def rignot_to_ismip6_grid(config):
    """Remap the Rignot melt-rate data set onto the ISMIP6 grid."""
    if not config.getboolean('rignot', 'remap'):
        return
    try:
        os.makedirs('rignot')
    except OSError:
        # directory already exists
        pass
    res = get_res(config)
    _remap(config.get('rignot', 'fileName'),
           f'ismip6/{res}_grid.nc',
           f'rignot/rignot_melt_rates_{res}.nc',
           res)
def _remap(config, decades):
    """
    Remap WOA temperature and salinity (already interpolated to the
    extrapolation depth levels) onto the ISMIP6 polar stereographic grid.

    Returns immediately if both outputs already exist; otherwise skips
    individual fields whose output file is present.
    """
    res = get_res(config)
    hres = get_horiz_res(config)
    # cheap early exit when all the work has been done before
    bothExist = True
    for fieldName in ['temperature', 'salinity']:
        outFileName = 'woa/woa_{}_{}_{}.nc'.format(fieldName, decades, res)
        if not os.path.exists(outFileName):
            bothExist = False
            break
    if bothExist:
        return
    print(' Remapping to {} grid...'.format(res))
    for fieldName in ['temperature', 'salinity']:
        inFileName = 'woa/woa18_{}_{}_interp_z.nc'.format(decades, fieldName)
        outGridFileName = 'ismip6/{}_grid.nc'.format(hres)
        outFileName = 'woa/woa_{}_{}_{}.nc'.format(fieldName, decades, res)
        if os.path.exists(outFileName):
            continue
        print(' {}'.format(outFileName))
        # NOTE(review): varName == fieldName, so the rename() below is a
        # no-op; presumably kept for symmetry with other data sources --
        # confirm before removing
        varName = fieldName
        inDescriptor = get_lat_lon_descriptor(inFileName)
        outDescriptor = get_antarctic_descriptor(outGridFileName)
        mappingFileName = 'woa/map_{}_to_{}.nc'.format(inDescriptor.meshName,
                                                       outDescriptor.meshName)
        remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
        # the mapping file must be built (or found on disk) before remapping
        remapper.build_mapping_file(method='bilinear')
        ds = xarray.open_dataset(inFileName)
        ds = ds.rename({varName: fieldName})
        dsOut = remapper.remap(ds, renormalizationThreshold=0.1)
        # remapping drops variable attributes; copy the important ones back
        for attrName in ['units', 'standard_name', 'long_name']:
            dsOut[fieldName].attrs[attrName] = ds[fieldName].attrs[attrName]
        dsOut.z.attrs = ds.z.attrs
        dsOut.to_netcdf(outFileName)
def process_obs(config):
    '''
    process and combine WOA, MEOP and EN4 observations, extrapolate into
    regions with missing data, and compute the thermal forcing
    '''
    if not config.getboolean('observations', 'compute'):
        return
    try:
        os.makedirs('obs')
    except OSError:
        # directory already exists
        pass
    resFinal = get_res(config, extrap=False)
    startYear = 1995
    endYear = 2017
    decades = f'{startYear:04d}-{endYear:04d}'
    # process each observational source, then combine and extrapolate
    process_woa(config, decades)
    process_meop(config)
    process_en4(config, startYear, endYear)
    print('Combining and extrapolating observations...')
    _combine_obs(config, decades)
    extrap.extrap_obs(config, decades)
    compute_thermal_forcing(
        f'obs/obs_temperature_{decades}_{resFinal}.nc',
        f'obs/obs_salinity_{decades}_{resFinal}.nc',
        f'obs/obs_thermal_forcing_{decades}_{resFinal}.nc')
    print(' Done.')
def extrap_obs(config, decades):
    """
    Extrapolate the observational climatology: horizontally within each
    IMBIE basin, then vertically, then remap to the final vertical grid,
    and finally extrapolate into grounded regions above sea level.

    Parameters
    ----------
    config : config parser
        Configuration options used throughout the package
    decades : str
        The year range (e.g. ``'1995-2017'``) identifying the climatology
    """
    resExtrap = get_res(config, extrap=True)
    resFinal = get_res(config, extrap=False)
    hres = get_horiz_res(config)
    inFileName = f'obs/obs_temperature_{decades}_{resExtrap}.nc'
    bedMaskFileName = f'obs/bed_mask_{resExtrap}.nc'
    bedFileName = f'bedmap2/bedmap2_{hres}.nc'
    basinNumberFileName = f'imbie/basinNumbers_{hres}.nc'

    # the temperature file supplies the grid used to build the 3D bed mask
    make_3D_bed_mask(inFileName, bedMaskFileName, bedFileName)

    matrixDirs = dict()
    progressDirs = dict()
    for fieldName in ['temperature', 'salinity']:
        progressDirs[fieldName] = f'obs/progress_{fieldName}'
        matrixDirs[fieldName] = f'obs/matrices_{fieldName}'

    for fieldName in ['temperature', 'salinity']:
        progressDir = progressDirs[fieldName]
        matrixDir = matrixDirs[fieldName]
        # bug fix: the combined observations written by _combine_obs()
        # live in obs/, not in the per-field progress directory
        inFileName = f'obs/obs_{fieldName}_{decades}_{resExtrap}.nc'
        outFileName = f'{progressDir}/' \
            f'obs_{fieldName}_{decades}_{resExtrap}_extrap_horiz.nc'
        extrap_horiz(config, inFileName, outFileName, fieldName,
                     bedFileName, basinNumberFileName, bedMaskFileName,
                     progressDir, matrixDir)

    for fieldName in ['temperature', 'salinity']:
        progressDir = progressDirs[fieldName]
        inFileName = f'{progressDir}/' \
            f'obs_{fieldName}_{decades}_{resExtrap}_extrap_horiz.nc'
        outFileName = f'{progressDir}/' \
            f'obs_{fieldName}_{decades}_{resExtrap}_extrap_vert.nc'
        extrap_vert(config, inFileName, outFileName, fieldName)

    # bug fix: each field's _extrap_vert file is written to that field's
    # own progress directory; previously both paths below reused the
    # progressDir variable leaked from the loop above (the salinity
    # directory), so the temperature path pointed at a file that was
    # never written
    tempFileName = \
        f"{progressDirs['temperature']}/" \
        f'obs_temperature_{decades}_{resExtrap}_extrap_vert.nc'
    salinFileName = \
        f"{progressDirs['salinity']}/" \
        f'obs_salinity_{decades}_{resExtrap}_extrap_vert.nc'
    # keep the thermal-forcing output where the original wrote it (the
    # salinity progress directory) for backward compatibility
    outFileName = \
        f"{progressDirs['salinity']}/" \
        f'obs_thermal_forcing_{decades}_{resExtrap}_extrap_vert.nc'
    compute_thermal_forcing(tempFileName, salinFileName, outFileName)

    inFileNames = {}
    outFileNames = {}
    for fieldName in ['temperature', 'salinity']:
        progressDir = progressDirs[fieldName]
        inFileNames[fieldName] = \
            f'{progressDir}/' \
            f'obs_{fieldName}_{decades}_{resExtrap}_extrap_vert.nc'
        outFileNames[fieldName] = \
            f'{progressDir}/' \
            f'obs_{fieldName}_{decades}_{resFinal}_extrap_vert.nc'
    remap_vertical(config, inFileNames, outFileNames, extrap=False)

    for fieldName in ['temperature', 'salinity']:
        progressDir = progressDirs[fieldName]
        matrixDir = matrixDirs[fieldName]
        inFileName = f'{progressDir}/' \
            f'obs_{fieldName}_{decades}_{resFinal}_extrap_vert.nc'
        outFileName = f'obs/obs_{fieldName}_{decades}_{resFinal}.nc'
        extrap_grounded_above_sea_level(config, inFileName, outFileName,
                                        fieldName, progressDir, matrixDir)
def _remap(config, modelFolder):
    """
    Remap model temperature and salinity (already interpolated to the
    common depth levels) onto the ISMIP6 polar stereographic grid, one
    time slice at a time, caching each slice in a progress directory so
    interrupted runs can resume.
    """
    res = get_res(config)
    hres = get_horiz_res(config)
    modelName = config.get('model', 'name')
    inFileNames = {}
    outFileNames = {}
    # cheap early exit when all the work has been done before
    bothExist = True
    for fieldName in ['temperature', 'salinity']:
        inFileNames[fieldName] = \
            '{}/{}_{}_interp_z.nc'.format(modelFolder, modelName, fieldName)
        outFileNames[fieldName] = \
            '{}/{}_{}_{}.nc'.format(modelFolder, modelName, fieldName, res)
        if not os.path.exists(outFileNames[fieldName]):
            bothExist = False
    if bothExist:
        return
    print(' Remapping to {} grid...'.format(res))
    for fieldName in inFileNames:
        inFileName = inFileNames[fieldName]
        outFileName = outFileNames[fieldName]
        if os.path.exists(outFileName):
            continue
        outGridFileName = 'ismip6/{}_grid.nc'.format(hres)
        print(' {}'.format(outFileName))
        progressDir = '{}/progress_remap_{}'.format(modelFolder, fieldName)
        try:
            os.makedirs(progressDir)
        except OSError:
            pass
        ds = xarray.open_dataset(inFileName)
        # choose the descriptor type based on whether lat/lon are 1D or 2D
        if len(ds.lon.dims) == 1:
            inDescriptor = LatLonGridDescriptor.read(
                inFileName, latVarName='lat', lonVarName='lon')
        else:
            assert(len(ds.lon.dims) == 2)
            inDescriptor = LatLon2DGridDescriptor.read(
                inFileName, latVarName='lat', lonVarName='lon')
        inDescriptor.regional = True
        outDescriptor = get_antarctic_descriptor(outGridFileName)
        mappingFileName = '{}/map_{}_to_{}.nc'.format(
            modelName.lower(), inDescriptor.meshName, outDescriptor.meshName)
        remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
        remapper.build_mapping_file(method='bilinear')
        ds = ds.drop(['lat', 'lon'])
        nt = ds.sizes['time']
        widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar(),
                   ' ', progressbar.ETA()]
        bar = progressbar.ProgressBar(widgets=widgets, maxval=nt).start()
        # keep the slice file names in time order so they can be
        # concatenated correctly below
        sliceFileNames = ['{}/{}_t_{}.nc'.format(progressDir, modelName,
                                                 tIndex)
                          for tIndex in range(nt)]
        for tIndex in range(nt):
            progressFileName = sliceFileNames[tIndex]
            if os.path.exists(progressFileName):
                bar.update(tIndex+1)
                continue
            dsIn = ds.isel(time=tIndex)
            dsOut = remapper.remap(dsIn, renormalizationThreshold=0.1)
            dsOut = dsOut.transpose('z', 'y', 'x')
            # remapping drops variable attributes; copy them back
            for attrName in ['units', 'standard_name', 'long_name']:
                if attrName in ds[fieldName].attrs:
                    dsOut[fieldName].attrs[attrName] = \
                        ds[fieldName].attrs[attrName]
            dsOut.z.attrs = ds.z.attrs
            dsOut.to_netcdf(progressFileName)
            bar.update(tIndex+1)
        bar.finish()
        # bug fix: pass an explicitly ordered file list rather than a glob
        # pattern -- globbing sorts lexically, so with more than 10 time
        # slices "..._t_10.nc" sorted before "..._t_2.nc" and the time
        # axis was concatenated out of order
        dsOut = xarray.open_mfdataset(sliceFileNames, concat_dim='time')
        dsOut['z_bnds'] = ds.z_bnds
        dsOut.to_netcdf(outFileName)
def _extrap_model(config, modelFolder):
    """
    Run the extrapolation pipeline for one model, with optional per-basin
    processing: horizontal extrapolation (possibly restricted to a single
    basin), then -- only when basins are being combined -- vertical
    extrapolation, remapping to the final vertical grid, and extrapolation
    into grounded regions above sea level.
    """
    resExtrap = get_res(config, extrap=True)
    resFinal = get_res(config, extrap=False)
    hres = get_horiz_res(config)
    modelName = config.get('model', 'name')
    # fields may be comma- and/or space-separated in the config file
    fields = config.get('model', 'fields')
    fields = fields.replace(',', ' ').split()
    basin = config.get('model', 'basin')
    combineBasins = config.getboolean('model', 'combineBasins')
    inFileName = f'{modelFolder}/remap/{modelName}_temperature_{resExtrap}.nc'
    bedMaskFileName = f'{modelFolder}/bed_mask_{resExtrap}.nc'
    bedFileName = f'bedmap2/bedmap2_{hres}.nc'
    basinNumberFileName = f'imbie/basinNumbers_{hres}.nc'
    # the temperature file supplies the grid used to build the 3D bed mask
    make_3D_bed_mask(inFileName, bedMaskFileName, bedFileName)
    matrixDir = os.path.join(modelName.lower(), 'matrices')
    progressDirs = dict()
    for fieldName in fields:
        progressDirs[fieldName] = f'{modelFolder}/progress_{fieldName}'
    for fieldName in fields:
        progressDir = progressDirs[fieldName]
        inFileName = f'{modelFolder}/remap/' \
            f'{modelName}_{fieldName}_{resExtrap}.nc'
        outFileName = f'{progressDir}/' \
            f'{modelName}_{fieldName}_{resExtrap}_extrap_horiz.nc'
        extrap_horiz(config, inFileName, outFileName, fieldName,
                     bedFileName, basinNumberFileName, bedMaskFileName,
                     progressDir, matrixDir, basin=basin,
                     combine=combineBasins)
    # when processing a single basin only, stop after the horizontal step;
    # the remaining stages run in the pass that combines all basins
    if not combineBasins:
        return
    for fieldName in fields:
        progressDir = progressDirs[fieldName]
        inFileName = f'{progressDir}/' \
            f'{modelName}_{fieldName}_{resExtrap}_extrap_horiz.nc'
        outFileName = f'{progressDir}/' \
            f'{modelName}_{fieldName}_{resExtrap}_extrap_vert.nc'
        extrap_vert(config, inFileName, outFileName, fieldName)
    inFileNames = {}
    outFileNames = {}
    for fieldName in fields:
        progressDir = progressDirs[fieldName]
        inFileNames[fieldName] = \
            f'{progressDir}/' \
            f'{modelName}_{fieldName}_{resExtrap}_extrap_vert.nc'
        outFileNames[fieldName] = \
            f'{progressDir}/' \
            f'{modelName}_{fieldName}_{resFinal}_extrap_vert.nc'
    remap_vertical(config, inFileNames, outFileNames, extrap=False)
    for fieldName in fields:
        progressDir = progressDirs[fieldName]
        inFileName = \
            f'{progressDir}/' \
            f'{modelName}_{fieldName}_{resFinal}_extrap_vert.nc'
        outFileName = f'{modelFolder}/{modelName}_{fieldName}_{resFinal}.nc'
        extrap_grounded_above_sea_level(config, inFileName, outFileName,
                                        fieldName, progressDir, matrixDir)
def extrap_obs(config, decades):
    """
    Extrapolate the observational climatology: horizontally within each
    basin, then vertically, then remap to the final vertical grid, and
    finally extrapolate into grounded regions above sea level.
    """
    resExtrap = get_res(config, extrap=True)
    resFinal = get_res(config, extrap=False)
    hres = get_horiz_res(config)
    fields = ('temperature', 'salinity')

    bedMaskFileName = f'obs/bed_mask_{resExtrap}.nc'
    bedFileName = f'bedmap2/bedmap2_{hres}.nc'
    basinNumberFileName = f'imbie/basinNumbers_{hres}.nc'

    # the temperature file supplies the grid used to build the 3D bed mask
    make_3D_bed_mask(f'obs/obs_temperature_{decades}_{resExtrap}.nc',
                     bedMaskFileName, bedFileName)

    # horizontal extrapolation, one field at a time
    for fieldName in fields:
        extrap_horiz(config,
                     f'obs/obs_{fieldName}_{decades}_{resExtrap}.nc',
                     f'obs/obs_{fieldName}_{decades}_{resExtrap}'
                     f'_extrap_horiz.nc',
                     fieldName, bedFileName, basinNumberFileName,
                     bedMaskFileName,
                     f'obs/progress_{fieldName}',
                     f'obs/matrices_{fieldName}')

    # vertical extrapolation of the horizontally extrapolated fields
    for fieldName in fields:
        extrap_vert(config,
                    f'obs/obs_{fieldName}_{decades}_{resExtrap}'
                    f'_extrap_horiz.nc',
                    f'obs/obs_{fieldName}_{decades}_{resExtrap}'
                    f'_extrap_vert.nc',
                    fieldName)

    # thermal forcing on the extrapolation grid
    compute_thermal_forcing(
        f'obs/obs_temperature_{decades}_{resExtrap}_extrap_vert.nc',
        f'obs/obs_salinity_{decades}_{resExtrap}_extrap_vert.nc',
        f'obs/obs_thermal_forcing_{decades}_{resExtrap}_extrap_vert.nc')

    # remap both fields from the extrapolation grid to the final grid
    inFileNames = {
        fieldName: f'obs/obs_{fieldName}_{decades}_{resExtrap}'
                   f'_extrap_vert.nc'
        for fieldName in fields}
    outFileNames = {
        fieldName: f'obs/obs_{fieldName}_{decades}_{resFinal}'
                   f'_extrap_vert.nc'
        for fieldName in fields}
    remap_vertical(config, inFileNames, outFileNames, extrap=False)

    # finally, fill grounded regions above sea level
    for fieldName in fields:
        extrap_grounded_above_sea_level(
            config,
            f'obs/obs_{fieldName}_{decades}_{resFinal}_extrap_vert.nc',
            f'obs/obs_{fieldName}_{decades}_{resFinal}.nc',
            fieldName,
            f'obs/progress_{fieldName}',
            f'obs/matrices_{fieldName}')
def _bin_meop(config, inVarName, outVarName):
    """
    Bin quality-controlled MEOP seal-tag profiles onto the ISMIP6 grid.

    Each profile value with QC flag '1' is assigned to an (x, y, z) cell
    on the extrapolation grid; cell values are the mean of all samples
    that fall in the cell, NaN where no samples landed.

    Parameters
    ----------
    config : config parser
        Configuration options with the grid spacing
    inVarName : str
        The MEOP variable to bin (e.g. 'TEMP' or 'PSAL')
    outVarName : str
        The name of the binned output variable
    """
    res = get_res(config)
    outFileName = 'meop/meop_{}_{}.nc'.format(outVarName, res)
    if os.path.exists(outFileName):
        return
    hres = get_horiz_res(config)
    dz = config.getfloat('grid', 'dzExtrap')
    nz = config.getint('grid', 'nzExtrap')
    # cell interfaces, mid-depths and bounds for the extrapolation grid
    # (assumes dz carries the sign convention of the z axis -- 'positive: up'
    # below suggests dz is negative; TODO confirm against the config)
    zOut = dz*numpy.arange(nz+1)
    z = 0.5*(zOut[0:-1] + zOut[1:])
    z_bnds = numpy.zeros((len(z), 2))
    z_bnds[:, 0] = zOut[0:-1]
    z_bnds[:, 1] = zOut[1:]
    ds = xarray.open_dataset('ismip6/{}_grid.nc'.format(hres))
    ds['z'] = (('z',), z)
    ds.z.attrs['units'] = 'meters'
    ds.z.attrs['bounds'] = 'z_bnds'
    ds.z.attrs['standard_name'] = 'depth'
    ds.z.attrs['positive'] = 'up'
    ds.z.attrs['axis'] = 'Z'
    ds['z_bnds'] = (('z', 'nbounds'), z_bnds)
    ds.z_bnds.attrs['comment'] = 'depth bounds'
    xMin = ds.x[0].values
    yMin = ds.y[0].values
    zMin = z[0]
    # NOTE(review): dx is also used for the y spacing below -- assumes
    # square grid cells; confirm against the ISMIP6 grid file
    dx = ds.x[1].values - ds.x[0].values
    nx = ds.sizes['x']
    ny = ds.sizes['y']
    nz = ds.sizes['z']
    # running sums and sample counts per cell, averaged at the end
    outField = numpy.zeros((nz, ny, nx))
    entryCount = numpy.zeros((nz, ny, nx), dtype=int)
    attrs = None
    proj = get_antarctic_stereographic_projection()
    fileList = sorted(glob.glob(
        'meop/MEOP-CTD_2018-04-10/*/DATA_ncARGO/*.nc'))
    print(' Binning MEOP {} profiles...'.format(outVarName))
    widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar(),
               ' ', progressbar.ETA()]
    bar = progressbar.ProgressBar(widgets=widgets,
                                  maxval=len(fileList)).start()
    for index, fileName in enumerate(fileList):
        dsProfile = xarray.open_dataset(fileName)
        lat = dsProfile.LATITUDE.values
        lon = dsProfile.LONGITUDE.values
        inField = dsProfile['{}_ADJUSTED'.format(inVarName)].values
        quality = dsProfile['{}_ADJUSTED_QC'.format(inVarName)].values
        # take variable attributes from the first file encountered
        if attrs is None:
            attrs = dsProfile[inVarName].attrs
        # project profile locations to polar stereographic x/y
        x, y = proj(lon, lat)
        pressure = dsProfile.PRES.values
        # clamp latitude for the pressure-to-depth conversion below
        # (presumably to keep gsw.z_from_p in a valid range -- confirm)
        lat = numpy.maximum(lat, -75.)
        for profile in range(pressure.shape[0]):
            xBin = int((x[profile]-xMin)/dx)
            yBin = int((y[profile]-yMin)/dx)
            # skip profiles that fall outside the grid
            if xBin < 0 or xBin >= nx:
                continue
            if yBin < 0 or yBin >= ny:
                continue
            for level in range(pressure.shape[1]):
                # only use samples with the "good data" QC flag
                if quality[profile, level] != b'1':
                    continue
                press = pressure[profile, level]
                if numpy.isnan(press):
                    continue
                depth = gsw.z_from_p(pressure[profile, level],
                                     lat[profile])
                zBin = int((depth-zMin)/dz)
                if zBin < 0 or zBin >= nz:
                    continue
                outField[zBin, yBin, xBin] += inField[profile, level]
                entryCount[zBin, yBin, xBin] += 1
        bar.update(index+1)
    bar.finish()
    # convert sums to means; cells with no samples become NaN
    mask = entryCount > 0
    outField[mask] /= entryCount[mask]
    outField[numpy.logical_not(mask)] = numpy.nan
    ds[outVarName] = (('z', 'y', 'x'), outField)
    for attr in ['units', 'long_name', 'comment']:
        ds[outVarName].attrs[attr] = attrs[attr]
    ds.to_netcdf(outFileName)