import os
import argparse
import pkg_resources

from mpas_analysis.shared.io.download import download_files


def download_analysis_data():
    """
    Entry point for downloading, from a public repository, the input data set
    that MPAS-Analysis needs in order to work. The input data set includes:
    pre-processed observations data, MPAS mapping files and MPAS regional mask
    files (which are used for the MOC computation), for a subset of MPAS
    meshes.
    """

    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-o", "--outDir", dest="outDir", required=True,
                        help="Directory where MPAS-Analysis input data will "
                             "be downloaded")
    args = parser.parse_args()

    try:
        os.makedirs(args.outDir)
    except OSError:
        pass

    urlBase = 'https://web.lcrc.anl.gov/public/e3sm/diagnostics'
    analysisFileList = pkg_resources.resource_string(
        'mpas_analysis', 'obs/analysis_input_files').decode('utf-8')

    # remove any empty strings from the list
    analysisFileList = list(filter(None, analysisFileList.split('\n')))

    download_files(analysisFileList, urlBase, args.outDir)
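# The following is an illustrative sketch, not taken from the MPAS-Analysis
# sources: it shows how download_analysis_data() could be registered as a
# console script with setuptools so it can be run directly from the command
# line. The module path 'mpas_analysis.download_data' and the script name are
# assumptions for illustration only.
from setuptools import setup, find_packages

setup(
    name='mpas_analysis',
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'download_analysis_data = '
            'mpas_analysis.download_data:download_analysis_data']})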
parser.add_argument("-i", "--inDir", dest="inDir", required=True, help="Directory where intermediate files used in " "processing should be downloaded") parser.add_argument("-o", "--outDir", dest="outDir", required=True, help="Directory where final preprocessed observation " "are stored") args = parser.parse_args() try: os.makedirs(args.inDir) except OSError: pass try: os.makedirs(args.outDir) except OSError: pass urlBase = 'https://www.geomar.de/fileadmin/personal/fb1/po/sschmidtko/' fileName = 'Antarctic_shelf_data.txt' download_files([fileName], urlBase, args.inDir) text_to_netcdf(args.inDir, args.inDir) remap(args.inDir, args.outDir)
def main():
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-i", "--inDir", dest="inDir", required=True,
                        help="Directory where intermediate files used in "
                             "processing should be downloaded")
    parser.add_argument("-o", "--outDir", dest="outDir", required=True,
                        help="Directory where final preprocessed observations "
                             "are stored")
    args = parser.parse_args()

    date = '20190603'
    inGridName = 'SouthernOcean_0.167x0.167degree_{}'.format(date)

    inTPrefix = 'THETA_mnthlyBar.0000000100'
    inSPrefix = 'SALT_mnthlyBar.0000000100'
    inMLDPrefix = 'MLD_mnthlyBar.0000000100'
    inUPrefix = 'UVEL_mnthlyBar.0000000100'
    inVPrefix = 'VVEL_mnthlyBar.0000000100'
    inGammaNPrefix = 'GAMMA_mnthlyBar.0000000100'

    # size in km of the polar stereographic grid
    antarcticStereoWidth = 10000

    config = MpasAnalysisConfigParser()
    config.read('mpas_analysis/config.default')
    config.set('climatology', 'comparisonAntarcticStereoWidth',
               '{}'.format(antarcticStereoWidth))

    outDescriptor = get_comparison_descriptor(config, 'antarctic')
    outGridName = '{}_{}'.format(outDescriptor.meshName, date)

    inPrefixes = [inTPrefix, inSPrefix, inMLDPrefix, inUPrefix, inVPrefix,
                  inGammaNPrefix]

    inGridFileName = '{}/grid.mat'.format(args.inDir)

    try:
        os.makedirs(args.inDir)
    except OSError:
        pass

    try:
        os.makedirs(args.outDir)
    except OSError:
        pass

    # download the desired file
    download_files(['GRID_README.txt'], urlBase='http://sose.ucsd.edu/DATA',
                   outDir=args.inDir)

    urlBase = 'http://sose.ucsd.edu/DATA/SO6_V2'
    fileList = ['grid.mat']
    for prefix in inPrefixes:
        fileList.append('{}.data.gz'.format(prefix))
        fileList.append('{}.meta'.format(prefix))

    download_files(fileList, urlBase, args.inDir)

    unzip_sose_data(inPrefixes, args.inDir)

    prefix = '{}/SOSE'.format(args.outDir)
    sose_volume_to_nc(prefix, inGridName, inGridFileName, args.inDir)

    prefix = '{}/SOSE_2005-2010_monthly'.format(args.outDir)
    remap_pt_s(prefix, inGridName, inGridFileName, args.inDir, inTPrefix,
               inSPrefix, inGammaNPrefix, outDescriptor, outGridName)
    remap_mld(prefix, inGridName, inGridFileName, args.inDir, inMLDPrefix,
              outDescriptor, outGridName)
    remap_u(prefix, inGridName, inGridFileName, args.inDir, inUPrefix,
            outDescriptor, outGridName)
    remap_v(prefix, inGridName, inGridFileName, args.inDir, inVPrefix,
            outDescriptor, outGridName)

    compute_vel_mag(prefix, inGridName, args.inDir, outGridName)
    compute_pot_density(prefix, inGridName, args.inDir, outGridName)
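# Illustrative sketch only, not the unzip_sose_data() called above: it
# decompresses each downloaded SOSE '<prefix>.data.gz' file in place with the
# standard library, leaving '<prefix>.data' next to the matching
# '<prefix>.meta' file.
import gzip
import os
import shutil


def unzip_sose_data_sketch(inPrefixes, inDir):
    for prefix in inPrefixes:
        gzFileName = os.path.join(inDir, '{}.data.gz'.format(prefix))
        outFileName = os.path.join(inDir, '{}.data'.format(prefix))
        if os.path.exists(outFileName):
            # skip files that were already decompressed on a previous run
            continue
        with gzip.open(gzFileName, 'rb') as fIn, \
                open(outFileName, 'wb') as fOut:
            shutil.copyfileobj(fIn, fOut)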
seawifs_files = [
    'S19972442010273.L3m_MC_CHL_chlor_a_9km.nc',
    'S19972742010304.L3m_MC_CHL_chlor_a_9km.nc',
    'S19973052010334.L3m_MC_CHL_chlor_a_9km.nc',
    'S19973352010365.L3m_MC_CHL_chlor_a_9km.nc',
    'S19980012010031.L3m_MC_CHL_chlor_a_9km.nc',
    'S19980322010059.L3m_MC_CHL_chlor_a_9km.nc',
    'S19980602010090.L3m_MC_CHL_chlor_a_9km.nc',
    'S19980912010120.L3m_MC_CHL_chlor_a_9km.nc',
    'S19981212010151.L3m_MC_CHL_chlor_a_9km.nc',
    'S19981522010181.L3m_MC_CHL_chlor_a_9km.nc',
    'S19981822010212.L3m_MC_CHL_chlor_a_9km.nc',
    'S19982132010243.L3m_MC_CHL_chlor_a_9km.nc']

urlBase = 'https://oceandata.sci.gsfc.nasa.gov/cgi/getfile/'
download_files(seawifs_files, urlBase=urlBase, outDir=args.inDir)
process_SeaWIFS(args.inDir, args.outDir)

# + + + GLODAPv2 data + + +
urlBase = ('https://www.nodc.noaa.gov/archive/arc0107/0162565/2.2/data/' +
           '0-data/mapped/')
download_files(['GLODAPv2.2016b_MappedClimatologies.tar.gz'],
               urlBase=urlBase, outDir=args.inDir)
process_GLODAP(args.inDir, args.outDir)

# + + + Landschuetzer Carbon Flux + + +
urlBase = ('https://www.nodc.noaa.gov/archive/arc0105/0160558/3.3/' +
           'data/0-data/')
download_files(['spco2_1982-2015_MPI_SOM-FFN_v2016.nc'], urlBase=urlBase,
               outDir=args.inDir)
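# Hypothetical first step of process_GLODAP (a sketch, not the project's
# implementation): extract the GLODAPv2 mapped-climatology tarball downloaded
# above so that the individual NetCDF climatology files can be processed.
import os
import tarfile


def extract_glodap_sketch(inDir):
    tarFileName = os.path.join(
        inDir, 'GLODAPv2.2016b_MappedClimatologies.tar.gz')
    with tarfile.open(tarFileName, 'r:gz') as tar:
        # unpack all members into the intermediate data directory
        tar.extractall(path=inDir)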
import os
import argparse
import pkg_resources

from mpas_analysis.shared.io.download import download_files


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("-o", "--outDir", dest="outDir", required=True,
                        help="Directory where MPAS-Analysis input data will "
                             "be downloaded")
    args = parser.parse_args()

    try:
        os.makedirs(args.outDir)
    except OSError:
        pass

    urlBase = 'https://web.lcrc.anl.gov/public/e3sm/diagnostics'
    analysisFileList = pkg_resources.resource_string(
        'mpas_analysis', 'obs/analysis_input_files').decode('utf-8')

    # remove any empty strings from the list
    analysisFileList = list(filter(None, analysisFileList.split('\n')))

    download_files(analysisFileList, urlBase, args.outDir)
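# Usage sketch: assuming this script is saved as download_data.py, the
# MPAS-Analysis input data could be fetched with a command along these lines
# (the file name and output path are placeholders):
#
#     python download_data.py -o /path/to/mpas_analysis/diagnostics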