def compute_vel_mag(prefix, inGridName, inDir, outGridName=None):
    '''
    Compute monthly velocity-magnitude climatologies from the previously
    remapped SOSE zonal and meridional velocity files.

    Parameters
    ----------
    prefix : str
        Path prefix of the cached velocity NetCDF files

    inGridName : str
        Name of the input (SOSE) grid; currently unused because the
        magnitude is only computed on the comparison grid

    inDir : str
        Directory of intermediate files; currently unused but kept for
        signature consistency with the other ``compute_*`` functions

    outGridName : str, optional
        Name of the comparison grid.  If not supplied, it is determined
        from the default antarctic comparison descriptor.  Passing it
        explicitly lets callers (e.g. the SOSE preprocessing ``main``)
        use a customized grid name.
    '''
    if outGridName is None:
        # fall back to the default antarctic comparison grid
        config = MpasAnalysisConfigParser()
        config.read('mpas_analysis/config.default')
        outDescriptor = get_comparison_descriptor(config, 'antarctic')
        outGridName = outDescriptor.meshName

    description = 'Monthly velocity magnitude climatologies from ' \
                  '2005-2010 average of the Southern Ocean State ' \
                  'Estimate (SOSE)'
    botDescription = 'Monthly velocity magnitude climatologies at sea ' \
                     'floor from 2005-2010 average from SOSE'

    # only the comparison grid (the components were remapped there already)
    for gridName in [outGridName]:
        outFileName = '{}_vel_mag_{}.nc'.format(prefix, gridName)
        uFileName = '{}_zonal_vel_{}.nc'.format(prefix, gridName)
        vFileName = '{}_merid_vel_{}.nc'.format(prefix, gridName)
        if not os.path.exists(outFileName):
            with xarray.open_dataset(uFileName) as dsU:
                with xarray.open_dataset(vFileName) as dsV:
                    # start from the zonal data set, dropping its velocity
                    # fields so only coordinates/metadata remain
                    dsVelMag = dsU.drop(['zonalVel', 'botZonalVel'])
                    dsVelMag['velMag'] = xarray.ufuncs.sqrt(
                        dsU.zonalVel**2 + dsV.meridVel**2)
                    dsVelMag.velMag.attrs['units'] = 'm s$^{-1}$'
                    dsVelMag.velMag.attrs['description'] = description

                    # same computation for the sea-floor velocities
                    dsVelMag['botVelMag'] = xarray.ufuncs.sqrt(
                        dsU.botZonalVel**2 + dsV.botMeridVel**2)
                    dsVelMag.botVelMag.attrs['units'] = 'm s$^{-1}$'
                    dsVelMag.botVelMag.attrs['description'] = botDescription

                    write_netcdf(dsVelMag, outFileName)
def setup_config(self):
    """Read the remap-obs test config and point its base directories at
    the test data and scratch locations."""
    parser = MpasAnalysisConfigParser()
    parser.read(str(self.datadir.join('config.remap_obs')))
    dataDir = str(self.datadir)
    for section in ('input', 'oceanObservations'):
        parser.set(section, 'baseDirectory', dataDir)
    parser.set('output', 'baseDirectory', str(self.test_dir))
    return parser
def setup_config(self, autocloseFileLimitFraction=0.5, maxChunkSize=10000):
    """Build a minimal config containing only the [input] options these
    tests need."""
    options = {'autocloseFileLimitFraction': autocloseFileLimitFraction,
               'maxChunkSize': maxChunkSize}
    config = MpasAnalysisConfigParser()
    config.add_section('input')
    for name, value in options.items():
        config.set('input', name, str(value))
    return config
def symlink_main_run(config, defaultConfig):
    '''
    Create symlinks to the climatology and time-series directories of a
    previously computed main run so its analysis does not have to be
    recomputed.

    Parameters
    ----------
    config : MpasAnalysisConfigParser
        Configuration for the current run, naming the main run's config
        file in ``[runs]/mainRunConfigFile``

    defaultConfig : str or None
        Path of the default config file, read before the main run's own
        config file when present

    Raises
    ------
    OSError
        If the main run config file named in ``config`` does not exist
    '''
    # load the main run's configuration (defaults first, if available)
    mainConfigFile = config.get('runs', 'mainRunConfigFile')
    if not os.path.exists(mainConfigFile):
        raise OSError('A main config file {} was specified but the '
                      'file does not exist'.format(mainConfigFile))
    mainConfigFiles = [mainConfigFile]
    if defaultConfig is not None:
        mainConfigFiles = [defaultConfig] + mainConfigFiles
    mainConfig = MpasAnalysisConfigParser()
    mainConfig.read(mainConfigFiles)

    def link_dir(section, option):
        # symlink the main run's directory into this run's output tree,
        # unless a directory already exists there
        destDirectory = build_config_full_path(config=config,
                                               section='output',
                                               relativePathOption=option,
                                               relativePathSection=section)
        if os.path.exists(destDirectory):
            return
        sourceDirectory = build_config_full_path(
            config=mainConfig, section='output', relativePathOption=option,
            relativePathSection=section)
        if os.path.exists(sourceDirectory):
            destBase, _ = os.path.split(destDirectory)
            make_directories(destBase)
            os.symlink(sourceDirectory, destDirectory)

    # shared output subdirectories
    for subdirectory in ['mpasClimatology', 'timeSeries', 'mapping', 'mask',
                         'profiles']:
        link_dir(section='output',
                 option='{}Subdirectory'.format(subdirectory))

    # per-component observation climatology subdirectories
    for component in ['ocean', 'seaIce']:
        for subdirectory in ['climatology', 'remappedClim']:
            link_dir(section='{}Observations'.format(component),
                     option='{}Subdirectory'.format(subdirectory))
def compute_pot_density(prefix, inGridName, inDir, outGridName=None):
    '''
    Compute monthly potential-density climatologies (3-D and at the sea
    floor) from the cached SOSE potential temperature and salinity files,
    using the TEOS-10 (gsw) equation of state referenced to the surface.

    Parameters
    ----------
    prefix : str
        Path prefix of the cached NetCDF climatology files

    inGridName : str
        Name of the input (SOSE) grid

    inDir : str
        Directory of intermediate files; currently unused but kept for
        signature consistency with the other ``compute_*`` functions

    outGridName : str, optional
        Name of the comparison grid.  If not supplied, it is determined
        from the default antarctic comparison descriptor.  Passing it
        explicitly lets callers (e.g. the SOSE preprocessing ``main``)
        use a customized grid name.
    '''
    if outGridName is None:
        # fall back to the default antarctic comparison grid
        config = MpasAnalysisConfigParser()
        config.read('mpas_analysis/config.default')
        outDescriptor = get_comparison_descriptor(config, 'antarctic')
        outGridName = outDescriptor.meshName

    description = 'Monthly potential density climatologies from ' \
                  '2005-2010 average of the Southern Ocean State ' \
                  'Estimate (SOSE)'
    botDescription = 'Monthly potential density climatologies at sea ' \
                     'floor from 2005-2010 average from SOSE'

    # compute on both the input grid and the comparison grid
    for gridName in [inGridName, outGridName]:
        outFileName = '{}_pot_den_{}.nc'.format(prefix, gridName)
        TFileName = '{}_pot_temp_{}.nc'.format(prefix, gridName)
        SFileName = '{}_salinity_{}.nc'.format(prefix, gridName)
        if not os.path.exists(outFileName):
            with xarray.open_dataset(TFileName) as dsT:
                with xarray.open_dataset(SFileName) as dsS:
                    # keep coordinates/metadata, drop the temperature fields
                    dsPotDensity = dsT.drop(['theta', 'botTheta'])

                    # 3-D potential density referenced to the surface (p=0)
                    lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon, dsS.z)
                    pressure = gsw.p_from_z(z.values, lat.values)
                    SA = gsw.SA_from_SP(dsS.salinity.values, pressure,
                                        lon.values, lat.values)
                    CT = gsw.CT_from_pt(SA, dsT.theta.values)
                    dsPotDensity['potentialDensity'] = (dsS.salinity.dims,
                                                        gsw.rho(SA, CT, 0.))
                    dsPotDensity.potentialDensity.attrs['units'] = \
                        'kg m$^{-3}$'
                    dsPotDensity.potentialDensity.attrs['description'] = \
                        description

                    # same computation at the sea floor (using zBot)
                    lat, lon, z = xarray.broadcast(dsS.lat, dsS.lon,
                                                   dsS.zBot)
                    pressure = gsw.p_from_z(z.values, lat.values)
                    SA = gsw.SA_from_SP(dsS.botSalinity.values, pressure,
                                        lon.values, lat.values)
                    CT = gsw.CT_from_pt(SA, dsT.botTheta.values)
                    dsPotDensity['botPotentialDensity'] = \
                        (dsS.botSalinity.dims, gsw.rho(SA, CT, 0.))
                    dsPotDensity.botPotentialDensity.attrs['units'] = \
                        'kg m$^{-3}$'
                    dsPotDensity.botPotentialDensity.attrs['description'] = \
                        botDescription

                    write_netcdf(dsPotDensity, outFileName)
def remap_v(prefix, inGridName, inGridFileName, inDir, inVPrefix,
            outDescriptor=None, outGridName=None):
    '''
    Convert the raw SOSE meridional velocity to NetCDF and remap it to
    the antarctic comparison grid.

    Parameters
    ----------
    prefix : str
        Path prefix of the cached NetCDF files

    inGridName : str
        Name of the input (SOSE) grid

    inGridFileName : str
        Path of the SOSE Matlab grid file

    inDir : str
        Directory containing the raw SOSE data, also where the mapping
        file is written

    inVPrefix : str
        File prefix of the raw meridional-velocity data

    outDescriptor : optional
        Descriptor of the comparison grid; the default antarctic
        comparison descriptor if not supplied

    outGridName : str, optional
        Name of the comparison grid, taken from ``outDescriptor`` by
        default
    '''
    cacheVFileName = '{}_merid_vel_{}.nc'.format(prefix, inGridName)

    if outDescriptor is None:
        # fall back to the default antarctic comparison grid
        config = MpasAnalysisConfigParser()
        config.read('mpas_analysis/config.default')
        outDescriptor = get_comparison_descriptor(config, 'antarctic')
    if outGridName is None:
        outGridName = outDescriptor.meshName

    matGrid = loadmat(inGridFileName)
    # lat/lon is a tensor grid so we can use 1-D arrays
    lon = matGrid['XC'][:, 0]
    # YG/hFacS (rather than YC/hFacC) — presumably because VVEL lives on
    # cell faces in latitude; TODO confirm against the SOSE grid docs
    lat = matGrid['YG'][0, :]
    z = matGrid['RC'][:, 0]
    cellFraction = matGrid['hFacS']

    botIndices = get_bottom_indices(cellFraction)

    with sose_v_to_nc('{}/{}'.format(inDir, inVPrefix),
                      cacheVFileName, lon, lat, z, cellFraction,
                      botIndices) as dsV:
        # the result of a bare LatLonGridDescriptor() call was immediately
        # overwritten here, so that dead assignment has been removed
        inDescriptor = LatLonGridDescriptor.read(cacheVFileName,
                                                 latVarName='lat',
                                                 lonVarName='lon')

        outVFileName = '{}_merid_vel_{}.nc'.format(prefix, outGridName)

        mappingFileName = '{}/map_V_{}_to_{}.nc'.format(inDir, inGridName,
                                                        outGridName)

        remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
        remapper.build_mapping_file(method='bilinear')

        if not os.path.exists(outVFileName):
            print('Remapping meridional velocity...')
            with remapper.remap(dsV, renormalizationThreshold=0.01) \
                    as remappedV:
                print('Done.')
                remappedV.attrs['history'] = ' '.join(sys.argv)
                write_netcdf(remappedV, outVFileName)
def __init__(self, config, componentName, taskName=None):  # {{{
    '''
    Construct the analysis task and adds it as a subtask of the
    ``parentTask``.

    Parameters
    ----------
    config : ``MpasAnalysisConfigParser``
        Contains configuration options

    componentName : {'ocean', 'seaIce'}
        The name of the component (same as the folder where the task
        resides)

    taskName : str, optional
        the name of the task, defaults to
        mpasClimatology<ComponentName>
    '''
    # Authors
    # -------
    # Xylar Asay-Davis

    if taskName is None:
        # capitalize the first letter of the component for the task name
        taskName = 'refYearMpasClimatology{}{}'.format(
            componentName[0].upper(), componentName[1:])

    # deep-copy the config by round-tripping it through an in-memory
    # text buffer, so changing the start and end years and dates later
    # won't affect other tasks sharing the original config
    configBuffer = StringIO()
    config.write(configBuffer)
    configBuffer.seek(0)  # rewind so the copy can be read back
    configCopy = MpasAnalysisConfigParser()
    configCopy.read_file(configBuffer)

    # call the constructor from the base class (AnalysisTask)
    super(RefYearMpasClimatologyTask, self).__init__(
        config=configCopy,
        componentName=componentName,
        taskName=taskName)
def remap(inDir, outDir):
    '''
    Remap the Schmidtko et al. (2014) bottom potential temperature,
    salinity and potential density data set from its Southern Ocean
    lat/lon grid to the antarctic comparison grid.

    Parameters
    ----------
    inDir : str
        Directory containing the source NetCDF file, also where the
        mapping file is written

    outDir : str
        Directory where the remapped NetCDF file is written
    '''
    inGridName = 'SouthernOcean_0.25x0.125degree'
    inFileName = '{}/Schmidtko_et_al_2014_bottom_PT_S_PD_{}.nc'.format(
        inDir, inGridName)

    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')

    # the result of a bare LatLonGridDescriptor() call was immediately
    # overwritten here, so that dead assignment has been removed
    inDescriptor = LatLonGridDescriptor.read(inFileName,
                                             latVarName='lat',
                                             lonVarName='lon')

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = outDescriptor.meshName

    outFileName = '{}/Schmidtko_et_al_2014_bottom_PT_S_PD_{}.nc'.format(
        outDir, outGridName)

    mappingFileName = '{}/map_{}_to_{}.nc'.format(inDir, inGridName,
                                                  outGridName)

    remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
    remapper.build_mapping_file(method='bilinear')

    if not os.path.exists(outFileName):
        print('Remapping...')
        with xarray.open_dataset(inFileName) as dsIn:
            # renamed from remappedMLD: this is PT/S/PD data, not mixed
            # layer depth
            with remapper.remap(dsIn, renormalizationThreshold=0.01) \
                    as dsRemapped:
                print('Done.')
                dsRemapped.attrs['history'] = ' '.join(sys.argv)
                write_netcdf(dsRemapped, outFileName)
def get_latlon_array_descriptor(self):
    """Build a LatLonGridDescriptor from the lat/lon arrays given in the
    [interpolate] section of the test config file."""
    config = MpasAnalysisConfigParser()
    config.read(str(self.datadir.join('config.analysis')))

    def read_axis(option):
        # the options are numpy expressions, so evaluate with numpy enabled
        return numpy.array(
            config.getExpression('interpolate', option, usenumpyfunc=True))

    return LatLonGridDescriptor.create(read_axis('lat'), read_axis('lon'),
                                       units='degrees')
def doTest(generate, expectedResults):
    # build a minimal config containing only the generate list under test
    config = MpasAnalysisConfigParser()
    config.add_section('output')
    config.set('output', 'generate', generate)
    # every task should report the expected check_generate() verdict
    for taskName, expected in expectedResults.items():
        task = AnalysisTask(config=config,
                            taskName=taskName,
                            componentName=cores[taskName],
                            tags=tags[taskName])
        self.assertEqual(task.check_generate(), expected)
def remap_pt_s(prefix, inGridName, inGridFileName, inDir, inTPrefix,
               inSPrefix, inGammaNPrefix, outDescriptor=None,
               outGridName=None):
    '''
    Convert the raw SOSE potential temperature, salinity and neutral
    density to NetCDF and remap them to the antarctic comparison grid.

    Parameters
    ----------
    prefix : str
        Path prefix of the cached NetCDF files

    inGridName : str
        Name of the input (SOSE) grid

    inGridFileName : str
        Path of the SOSE Matlab grid file

    inDir : str
        Directory containing the raw SOSE data, also where the mapping
        file is written

    inTPrefix, inSPrefix, inGammaNPrefix : str
        File prefixes of the raw potential-temperature, salinity and
        neutral-density data

    outDescriptor : optional
        Descriptor of the comparison grid; the default antarctic
        comparison descriptor if not supplied

    outGridName : str, optional
        Name of the comparison grid, taken from ``outDescriptor`` by
        default
    '''
    cacheTFileName = '{}_pot_temp_{}.nc'.format(prefix, inGridName)
    cacheSFileName = '{}_salinity_{}.nc'.format(prefix, inGridName)
    cacheGammaNFileName = '{}_neut_den_{}.nc'.format(prefix, inGridName)

    if outDescriptor is None:
        # fall back to the default antarctic comparison grid
        config = MpasAnalysisConfigParser()
        config.read('mpas_analysis/config.default')
        outDescriptor = get_comparison_descriptor(config, 'antarctic')
    if outGridName is None:
        outGridName = outDescriptor.meshName

    matGrid = loadmat(inGridFileName)
    # lat/lon is a tensor grid so we can use 1-D arrays
    lon = matGrid['XC'][:, 0]
    lat = matGrid['YC'][0, :]
    z = matGrid['RC'][:, 0]
    cellFraction = matGrid['hFacC']

    botIndices = get_bottom_indices(cellFraction)

    with sose_pt_to_nc('{}/{}'.format(inDir, inTPrefix),
                       cacheTFileName, lon, lat, z, cellFraction,
                       botIndices) as dsT:
        # the result of a bare LatLonGridDescriptor() call was immediately
        # overwritten here, so that dead assignment has been removed
        inDescriptor = LatLonGridDescriptor.read(cacheTFileName,
                                                 latVarName='lat',
                                                 lonVarName='lon')

        outTFileName = '{}_pot_temp_{}.nc'.format(prefix, outGridName)
        outSFileName = '{}_salinity_{}.nc'.format(prefix, outGridName)
        outGammaNFileName = '{}_neut_den_{}.nc'.format(prefix, outGridName)

        mappingFileName = '{}/map_C_{}_to_{}.nc'.format(inDir, inGridName,
                                                        outGridName)

        # the same cell-centered mapping is reused for T, S and gamma_n
        remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)
        remapper.build_mapping_file(method='bilinear')

        if not os.path.exists(outTFileName):
            # demote zBot from a coordinate to a data variable so it gets
            # remapped along with the data, then restore it afterward
            dsT.reset_coords(names='zBot', inplace=True)
            print('Remapping potential temperature...')
            with remapper.remap(dsT, renormalizationThreshold=0.01) \
                    as remappedT:
                print('Done.')
                remappedT.attrs['history'] = ' '.join(sys.argv)
                remappedT.set_coords(names='zBot', inplace=True)
                write_netcdf(remappedT, outTFileName)

    with sose_s_to_nc('{}/{}'.format(inDir, inSPrefix),
                      cacheSFileName, lon, lat, z, cellFraction,
                      botIndices) as dsS:
        if not os.path.exists(outSFileName):
            dsS.reset_coords(names='zBot', inplace=True)
            print('Remapping salinity...')
            with remapper.remap(dsS, renormalizationThreshold=0.01) \
                    as remappedS:
                print('Done.')
                remappedS.attrs['history'] = ' '.join(sys.argv)
                remappedS.set_coords(names='zBot', inplace=True)
                write_netcdf(remappedS, outSFileName)

    with sose_gammaN_to_nc('{}/{}'.format(inDir, inGammaNPrefix),
                           cacheGammaNFileName, lon, lat, z, cellFraction,
                           botIndices) as dsGammaN:
        if not os.path.exists(outGammaNFileName):
            dsGammaN.reset_coords(names='zBot', inplace=True)
            print('Remapping neutral density...')
            with remapper.remap(dsGammaN, renormalizationThreshold=0.01) \
                    as remappedGammaN:
                print('Done.')
                remappedGammaN.attrs['history'] = ' '.join(sys.argv)
                remappedGammaN.set_coords(names='zBot', inplace=True)
                write_netcdf(remappedGammaN, outGammaNFileName)
def main():
    """
    Entry point for the main script ``mpas_analysis``
    """

    # --- command-line interface ---
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-v', '--version',
                        action='version',
                        version='mpas_analysis {}'.format(
                            mpas_analysis.__version__),
                        help="Show version number and exit")
    parser.add_argument("--setup_only", dest="setup_only",
                        action='store_true',
                        help="If only the setup phase, not the run or HTML "
                        "generation phases, should be executed.")
    parser.add_argument("--html_only", dest="html_only",
                        action='store_true',
                        help="If only the setup and HTML generation phases, "
                        "not the run phase, should be executed.")
    parser.add_argument("-g", "--generate", dest="generate",
                        help="A list of analysis modules to generate "
                        "(nearly identical generate option in config file).",
                        metavar="ANALYSIS1[,ANALYSIS2,ANALYSIS3,...]")
    parser.add_argument("-l", "--list", dest="list", action='store_true',
                        help="List the available analysis tasks")
    parser.add_argument("-p", "--purge", dest="purge", action='store_true',
                        help="Purge the analysis by deleting the output"
                        "directory before running")
    parser.add_argument('configFiles', metavar='CONFIG', type=str, nargs='*',
                        help='config file')
    parser.add_argument("--plot_colormaps", dest="plot_colormaps",
                        action='store_true',
                        help="Make a plot displaying all available colormaps")
    parser.add_argument("--verbose", dest="verbose", action='store_true',
                        help="Verbose error reporting during setup-and-check "
                        "phase")
    args = parser.parse_args()

    # with no arguments at all, print usage rather than running
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(0)

    # fail early if any user-supplied config file is missing
    for configFile in args.configFiles:
        if not os.path.exists(configFile):
            raise OSError('Config file {} not found.'.format(configFile))

    # add config.default to cover default not included in the config files
    # provided on the command line
    if pkg_resources.resource_exists('mpas_analysis', 'config.default'):
        defaultConfig = pkg_resources.resource_filename(
            'mpas_analysis', 'config.default')
        configFiles = [defaultConfig] + args.configFiles
    else:
        print('WARNING: Did not find config.default. Assuming other config '
              'file(s) contain a\n'
              'full set of configuration options.')
        defaultConfig = None
        configFiles = args.configFiles

    # later files override earlier ones, so user config wins over defaults
    config = MpasAnalysisConfigParser()
    config.read(configFiles)

    # --list: print the available tasks and exit without running anything
    if args.list:
        analyses = build_analysis_list(config, controlConfig=None)
        for analysisTask in analyses:
            print('task: {}'.format(analysisTask.taskName))
            print(' component: {}'.format(analysisTask.componentName)),
            print(' tags: {}'.format(', '.join(analysisTask.tags)))
        sys.exit(0)

    # --plot_colormaps: render the colormap gallery and exit
    if args.plot_colormaps:
        _register_custom_colormaps()
        _plot_color_gradients()
        sys.exit(0)

    # optionally load a control run to compare against instead of obs
    if config.has_option('runs', 'controlRunConfigFile'):
        controlConfigFile = config.get('runs', 'controlRunConfigFile')
        if not os.path.exists(controlConfigFile):
            raise OSError('A control config file {} was specified but the '
                          'file does not exist'.format(controlConfigFile))
        controlConfigFiles = [controlConfigFile]
        if defaultConfig is not None:
            controlConfigFiles = [defaultConfig] + controlConfigFiles
        controlConfig = MpasAnalysisConfigParser()
        controlConfig.read(controlConfigFiles)

        # replace the log directory so log files get written to this run's
        # log directory, not the control run's
        logsDirectory = build_config_full_path(config, 'output',
                                               'logsSubdirectory')

        controlConfig.set('output', 'logsSubdirectory', logsDirectory)

        print('Comparing to control run {} rather than observations. \n'
              'Make sure that MPAS-Analysis has been run previously with the '
              'control config file.'.format(
                  controlConfig.get('runs', 'mainRunName')))
    else:
        controlConfig = None

    # delete previous output first if requested
    if args.purge:
        purge_output(config)

    # reuse a previously computed main run's output via symlinks
    if config.has_option('runs', 'mainRunConfigFile'):
        symlink_main_run(config, defaultConfig)

    # the -g flag overrides the config file's generate option
    if args.generate:
        update_generate(config, args.generate)

    if controlConfig is not None:
        # we want to use the "generate" option from the current run, not
        # the control config file
        controlConfig.set('output', 'generate', config.get('output',
                                                           'generate'))

    logsDirectory = build_config_full_path(config, 'output',
                                           'logsSubdirectory')
    make_directories(logsDirectory)

    update_time_bounds_in_config(config)

    file_cache_maxsize = config.getint('input', 'file_cache_maxsize')
    try:
        xarray.set_options(file_cache_maxsize=file_cache_maxsize)
    except ValueError:
        # xarray version doesn't support file_cache_maxsize yet...
        pass

    startTime = time.time()

    # setup phase: build the task list and check which tasks to generate
    analyses = build_analysis_list(config, controlConfig)
    analyses = determine_analyses_to_generate(analyses, args.verbose)

    setupDuration = time.time() - startTime

    # run phase (skipped for --setup_only and --html_only)
    if not args.setup_only and not args.html_only:
        run_analysis(config, analyses)
        runDuration = time.time() - startTime

        # report timing as h:mm:ss.ss
        m, s = divmod(setupDuration, 60)
        h, m = divmod(int(m), 60)
        print('Total setup time: {}:{:02d}:{:05.2f}'.format(h, m, s))
        m, s = divmod(runDuration, 60)
        h, m = divmod(int(m), 60)
        print('Total run time: {}:{:02d}:{:05.2f}'.format(h, m, s))

    # HTML generation phase (skipped only for --setup_only)
    if not args.setup_only:
        generate_html(config, analyses, controlConfig)
def setup_config(self, maxChunkSize=10000):
    """Return a config containing only the [input]/maxChunkSize option."""
    config = MpasAnalysisConfigParser()
    config.add_section('input')
    config.set('input', 'maxChunkSize', '{}'.format(maxChunkSize))
    return config
resolution.
'''
# Stand-alone helper snippet: build a bilinear mapping file from an MPAS
# mesh to the antarctic stereographic comparison grid.

from mpas_analysis.shared.interpolation import Remapper
from mpas_analysis.shared.grid import MpasMeshDescriptor
from mpas_analysis.shared.climatology import get_comparison_descriptor
from mpas_analysis.configuration import MpasAnalysisConfigParser


# replace with the MPAS mesh name
inGridName = 'oQU240'

# replace with the path to the desired mesh or restart file
inGridFileName = '/media/xylar/extra_data/analysis/edison/G-QU240-master-intel/run/mpaso.rst.0001-01-06_00000.nc'

config = MpasAnalysisConfigParser()
config.read('mpas_analysis/config.default')

# replace these numbers with the desired size and resolution of the output
# mesh (they override the defaults from config.default)
config.set('climatology', 'comparisonAntarcticStereoWidth', '6000.0')
config.set('climatology', 'comparisonAntarcticStereoResolution', '10.0')

inDescriptor = MpasMeshDescriptor(inGridFileName, inGridName)

# the comparison descriptor reflects the stereo options set above
outDescriptor = get_comparison_descriptor(config, 'antarctic')
outGridName = outDescriptor.meshName

mappingFileName = 'map_{}_to_{}.nc'.format(inGridName, outGridName)

remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)

remapper.build_mapping_file(method='bilinear')
def setup_config(self):
    """Load the analysis test config file onto ``self.config``."""
    self.config = MpasAnalysisConfigParser()
    path = self.datadir.join('config.analysis')
    self.config.read(str(path))
class TestMPASAnalysisConfigParser(TestCase):
    # Unit tests for MpasAnalysisConfigParser, exercising its
    # getExpression and getWithDefault extensions against a test config

    def setup_config(self):
        # read the shared test config file onto self.config
        configPath = self.datadir.join('config.analysis')
        self.config = MpasAnalysisConfigParser()
        self.config.read(str(configPath))

    def check_container(self, container, container_type, item_type):
        # assert container is of container_type and every element is of
        # item_type
        assert isinstance(container, container_type)
        for item in container:
            assert isinstance(item, item_type)

    def test_read_config(self):
        self.setup_config()

        colorMapName = self.config.get('sst_modelvsobs', 'cmapDiff')
        self.assertEqual(colorMapName, 'coolwarm')

        # getExpression should agree with the standard typed getters
        self.assertEqual(self.config.getint('Test', 'testInt'), 15)
        self.assertEqual(self.config.getExpression('Test', 'testInt'), 15)

        self.assertEqual(self.config.getfloat('Test', 'testFloat'), 18.0)
        self.assertEqual(self.config.getExpression('Test', 'testFloat'),
                         18.0)

        self.assertEqual(self.config.getfloat('Test', 'testFloat2'), 3.)
        self.assertEqual(self.config.getExpression('Test', 'testFloat2'), 3.)

        self.assertEqual(self.config.getboolean('Test', 'testBool'), True)
        self.assertEqual(self.config.getExpression('Test', 'testBool'), True)

        # lists of ints parse with their natural element type...
        testList = self.config.getExpression('sst_modelvsobs',
                                             'cmapIndicesModelObs')
        self.check_container(testList, list, int)
        self.assertEqual(testList, [0, 40, 80, 110, 140, 170, 200, 230, 255])

        # ...and elementType coerces each element
        testList = self.config.getExpression('sst_modelvsobs',
                                             'cmapIndicesModelObs',
                                             elementType=float)
        self.check_container(testList, list, float)
        self.assertEqual(testList,
                         [0., 40., 80., 110., 140., 170., 200., 230., 255.])

        testList = self.config.getExpression('sst_modelvsobs',
                                             'comparisonTimes')
        self.check_container(testList, list, str)
        self.assertEqual(testList, ['JFM', 'JAS', 'ANN'])

        testList = self.config.getExpression('Test', 'testList')
        self.check_container(testList, list, float)
        self.assertEqual(testList, [0.5, 0.1, 0.5])

        # tuples and dicts are parsed to the corresponding python types
        testTuple = self.config.getExpression('Test', 'testTuple')
        assert isinstance(testTuple, tuple)
        self.assertEqual(testTuple, (5, 0.1, 'string'))

        testDict = self.config.getExpression('Test', 'testDict')
        assert isinstance(testDict, dict)
        self.assertEqual(testDict, {'key1': 'string',
                                    'key2': -12,
                                    'key3': False})

        # a missing option raises the standard configparser error
        with six.assertRaisesRegex(
                self, configparser.NoOptionError,
                "No option 'doesntexist' in section: 'Test'"):
            self.config.getExpression(str('Test'), str('doesntexist'))

    @requires_numpy
    def test_read_config_numpy(self):
        self.setup_config()

        # tests numpy evaluation capability
        import numpy as np
        for testname in ['testNumpyarange' + str(ii)
                         for ii in np.arange(3)]:
            self.assertArrayEqual(
                self.config.getExpression('TestNumpy', testname,
                                          usenumpyfunc=True),
                np.arange(0, 1, 10))

        for testname in ['testNumpylinspace' + str(ii)
                         for ii in np.arange(3)]:
            self.assertArrayEqual(
                self.config.getExpression('TestNumpy', testname,
                                          usenumpyfunc=True),
                np.linspace(0, 1, 10))

        for testNumpy in ['testNumpypi' + str(ii)
                          for ii in np.arange(3)] + ['testNumpyPi']:
            self.assertEqual(
                self.config.getExpression('TestNumpy', testNumpy,
                                          usenumpyfunc=True),
                np.pi)

        # dunder access is rejected when numpy evaluation is enabled
        with six.assertRaisesRegex(
                self, AssertionError,
                "'__' is not allowed in .* for `usenumpyfunc=True`"):
            self.config.getExpression('TestNumpy', 'testBadStr',
                                      usenumpyfunc=True),

    def test_get_with_default(self):
        self.setup_config()

        def check_get_with_default(name, value, dtype):
            # test an options that doesn't exist using getWithDefault
            var = self.config.getWithDefault('sst_modelvsobs', name, value)
            assert isinstance(var, dtype)
            self.assertEqual(var, value)

        # test several types with getWithDefault
        check_get_with_default(name='aBool', value=True, dtype=bool)
        check_get_with_default(name='anInt', value=1,
                               dtype=six.integer_types)
        check_get_with_default(name='aFloat', value=1.0, dtype=float)
        check_get_with_default(name='aList', value=[1, 2, 3], dtype=list)
        check_get_with_default(name='aTuple', value=(1, 2, 3), dtype=tuple)
        check_get_with_default(name='aDict', value={'blah': 1}, dtype=dict)
        check_get_with_default(name='aStr', value='blah',
                               dtype=six.string_types)
def main():
    # Download the raw SOSE (Southern Ocean State Estimate) monthly
    # climatology output, convert it to NetCDF, and remap it to the
    # antarctic comparison grid.
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-i", "--inDir", dest="inDir", required=True,
                        help="Directory where intermediate files used in "
                        "processing should be downloaded")
    parser.add_argument("-o", "--outDir", dest="outDir", required=True,
                        help="Directory where final preprocessed observation "
                        "are stored")
    args = parser.parse_args()

    # the date tags this version of the processed data set
    date = '20190603'
    inGridName = 'SouthernOcean_0.167x0.167degree_{}'.format(date)

    # file prefixes of the raw SOSE binary output
    inTPrefix = 'THETA_mnthlyBar.0000000100'
    inSPrefix = 'SALT_mnthlyBar.0000000100'
    inMLDPrefix = 'MLD_mnthlyBar.0000000100'
    inUPrefix = 'UVEL_mnthlyBar.0000000100'
    inVPrefix = 'VVEL_mnthlyBar.0000000100'
    inGammaNPrefix = 'GAMMA_mnthlyBar.0000000100'

    # size in km of the polar stereographic grid
    antarcticStereoWidth = 10000

    # widen the default antarctic comparison grid before building its
    # descriptor
    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')
    config.set('climatology', 'comparisonAntarcticStereoWidth',
               '{}'.format(antarcticStereoWidth))

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = '{}_{}'.format(outDescriptor.meshName, date)

    inPrefixes = [inTPrefix, inSPrefix, inMLDPrefix, inUPrefix, inVPrefix,
                  inGammaNPrefix]

    inGridFileName = '{}/grid.mat'.format(args.inDir)

    # create the in/out directories, ignoring "already exists" errors
    try:
        os.makedirs(args.inDir)
    except OSError:
        pass

    try:
        os.makedirs(args.outDir)
    except OSError:
        pass

    # download the grid README, grid file and raw data
    download_files(['GRID_README.txt'], urlBase='http://sose.ucsd.edu/DATA',
                   outDir=args.inDir)

    urlBase = 'http://sose.ucsd.edu/DATA/SO6_V2'
    fileList = ['grid.mat']
    for prefix in inPrefixes:
        fileList.append('{}.data.gz'.format(prefix))
        fileList.append('{}.meta'.format(prefix))

    download_files(fileList, urlBase, args.inDir)
    unzip_sose_data(inPrefixes, args.inDir)

    prefix = '{}/SOSE'.format(args.outDir)
    sose_volume_to_nc(prefix, inGridName, inGridFileName, args.inDir)

    prefix = '{}/SOSE_2005-2010_monthly'.format(args.outDir)

    # NOTE(review): these calls pass outDescriptor/outGridName, but the
    # versions of remap_pt_s, remap_v, compute_vel_mag and
    # compute_pot_density visible elsewhere in this source do NOT accept
    # those extra arguments -- confirm the callee signatures are in sync
    # with this caller
    remap_pt_s(prefix, inGridName, inGridFileName, args.inDir, inTPrefix,
               inSPrefix, inGammaNPrefix, outDescriptor, outGridName)
    remap_mld(prefix, inGridName, inGridFileName, args.inDir, inMLDPrefix,
              outDescriptor, outGridName)
    remap_u(prefix, inGridName, inGridFileName, args.inDir, inUPrefix,
            outDescriptor, outGridName)
    remap_v(prefix, inGridName, inGridFileName, args.inDir, inVPrefix,
            outDescriptor, outGridName)
    compute_vel_mag(prefix, inGridName, args.inDir, outGridName)
    compute_pot_density(prefix, inGridName, args.inDir, outGridName)
Modify the grid name, the path to the MPAS grid file and the output grid
resolution.
'''
# Stand-alone helper snippet: build a bilinear mapping file from an MPAS
# mesh to the global lat/lon comparison grid.

from mpas_analysis.shared.interpolation import Remapper
from mpas_analysis.shared.grid import MpasMeshDescriptor
from mpas_analysis.shared.climatology import get_comparison_descriptor
from mpas_analysis.configuration import MpasAnalysisConfigParser


# replace with the MPAS mesh name
inGridName = 'oQU240'

# replace with the path to the desired mesh or restart file
inGridFileName = '/media/xylar/extra_data/analysis/edison/G-QU240-master-intel/run/mpaso.rst.0001-01-06_00000.nc'

config = MpasAnalysisConfigParser()
config.read('mpas_analysis/config.default')

# replace 1.0 with the desired resolution of the output mesh (these
# override the defaults from config.default)
config.set('climatology', 'comparisonLatResolution', '1.0')
config.set('climatology', 'comparisonLonResolution', '1.0')

inDescriptor = MpasMeshDescriptor(inGridFileName, inGridName)

# the comparison descriptor reflects the lat/lon resolution set above
outDescriptor = get_comparison_descriptor(config, 'latlon')
outGridName = outDescriptor.meshName

mappingFileName = 'map_{}_to_{}.nc'.format(inGridName, outGridName)

remapper = Remapper(inDescriptor, outDescriptor, mappingFileName)

remapper.build_mapping_file(method='bilinear')
def setup_config(self, autocloseFileLimitFraction=0.5, maxChunkSize=10000):
    """Build the full config needed by the climatology tests, covering the
    [input], [output], [climatology] and [oceanObservations] sections."""
    settings = {
        'input': [
            ('autocloseFileLimitFraction', str(autocloseFileLimitFraction)),
            ('maxChunkSize', str(maxChunkSize)),
            ('mpasMeshName', 'QU240')],
        'output': [
            ('baseDirectory', self.test_dir),
            ('mappingSubdirectory', '.'),
            ('mpasClimatologySubdirectory', 'clim/mpas')],
        'climatology': [
            ('startYear', '2'),
            ('endYear', '2'),
            ('comparisonLatResolution', '0.5'),
            ('comparisonLonResolution', '0.5'),
            ('mpasInterpolationMethod', 'bilinear')],
        'oceanObservations': [
            ('interpolationMethod', 'bilinear'),
            ('climatologySubdirectory', 'clim/obs'),
            ('remappedClimSubdirectory', 'clim/obs/remap')]}

    config = MpasAnalysisConfigParser()
    for section, options in settings.items():
        config.add_section(section)
        for option, value in options:
            config.set(section, option, value)
    return config