Example #1
def extract_tseries(config):

    logger = shared.get_logger()
    logger.info('*** EXTRACTING TIME SERIES ***')

    wrfout_dir = config['wrfout_dir']
    tseries_dir = config['tseries_dir']
    json_dir = config['json_dir']
    init_time = config['init_time']
    dom = config['dom']
    fcst_file = '%s/wrfout_d%02d_%s:00:00.nc' % (
        wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H")
    )  # note we add on the nc extension here
    loc_file = config['locations_file']
    ncl_code = config['tseries_code']
    extract_hgts = config['extract_hgts']
    tseries_fmt = config['tseries_fmt']
    ncl_opt_file = config['ncl_opt_file']

    ncl_log = config['ncl_log']
    if not os.path.exists(tseries_dir):
        os.makedirs(tseries_dir)

    # Always go via the netcdf file
    tseries_file = '%s/tseries_d%02d_%s.nc' % (
        tseries_dir, dom, init_time.strftime("%Y-%m-%d_%H"))

    os.environ['FCST_FILE'] = fcst_file
    os.environ['LOCATIONS_FILE'] = loc_file
    os.environ['NCL_OUT_DIR'] = tseries_dir
    os.environ['NCL_OUT_FILE'] = tseries_file
    os.environ['NCL_OPT_FILE'] = ncl_opt_file

    logger.debug('Setting environment variables')
    logger.debug('FCST_FILE    ----> %s' % fcst_file)
    logger.debug('NCL_OUT_DIR  ----> %s' % tseries_dir)
    logger.debug('NCL_OUT_FILE  ----> %s' % tseries_file)
    logger.debug('LOCATIONS_FILE ----> %s' % loc_file)
    logger.debug('NCL_OPT_FILE   ----> %s' % ncl_opt_file)
    logger.debug(extract_hgts)

    ncl_hgts = '(/%s/)' % ','.join(map(str, extract_hgts))

    for script in ncl_code:
        cmd = "ncl 'extract_heights=%s'  %s >> %s 2>&1" % (ncl_hgts, script,
                                                           ncl_log)
        shared.run_cmd(cmd, config)

    ncdump(config)
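# A minimal usage sketch for extract_tseries(): every key below is read in the
# body above, but the paths and values here are hypothetical placeholders.
import datetime

example_config = {
    'wrfout_dir': '/path/to/wrfout',
    'tseries_dir': '/path/to/tseries',
    'json_dir': '/path/to/json',
    'init_time': datetime.datetime(2014, 1, 1, 0),
    'dom': 1,
    'locations_file': '/path/to/locations.csv',
    'tseries_code': ['extract_tseries.ncl'],
    'extract_hgts': [10, 60, 80],
    'tseries_fmt': 'nc',
    'ncl_opt_file': '/path/to/ncl.opt',
    'ncl_log': '/path/to/logs/ncl.log',
}
# extract_tseries(example_config)
# Each NCL script is then invoked roughly as:
#   ncl 'extract_heights=(/10,60,80/)'  extract_tseries.ncl >> /path/to/logs/ncl.log 2>&1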
Example #3
def readPRISM():
    #### read the PRISM data and align it with the GCM grid
    import numpy as np
    from netCDF4 import Dataset
    import os
    from ncdump import ncdump
    
    prefix = '/home/yumin/Desktop/DS/DATA/'
    filepath = prefix+'PRISM/monthly/ppt/PRISM_ppt_stable_4kmM3_1950-2005_monthly_total.nc'
    savepath = prefix+'PRISM/PRISMdata/ppt/processeddata/prism_prmean_monthly_0.04by0.04_original_grid/'
    #%%
    dataset = Dataset(filepath,mode='r')
    nc_attrs, nc_dims, nc_vars = ncdump(dataset)
    
    # original latitude from 49.9375N to 24.1042N by 1/24 (~0.04), 621 points
    # latitude from N to N by ,  points
    lats_o = dataset.variables['lat'][:]
    # original longitude from -125.021 to -66.5208 by 1/24 (~0.04), 1405 points
    # original longitude from 234.979E to 293.479E by 1/24 (~0.04), 1405 points
    lons_o = dataset.variables['lon'][:]+360 
    # time from 195001 to 200512 by 1, 672 points
    time = dataset.variables['time']#
    time = list(time)
    
    precipitation_o = dataset.variables['ppt'][:] #total precipitation, [month,lat,lon], mm/month
    prmean_month = np.ma.filled(precipitation_o, 0)/30.0  # daily mean precipitation, unit: mm/day
    
    #%% align with GCM grid
    ## latitude from [49.9167N,24.125N] by 1/24 (~0.04), 620 points
    ## longitude from [235E,293.458E] by 1/24 (~0.04), 1404 points
    lats = np.zeros(len(lats_o)-1,)
    lons = np.zeros(len(lons_o)-1,)
    for i in range(len(lats_o)-1):
        lats[i] = 0.5*(lats_o[i]+lats_o[i+1])
    for i in range(len(lons_o)-1):
        lons[i] = 0.5*(lons_o[i]+lons_o[i+1])
    (Nmon,Nlat,Nlon) = prmean_month.shape
    prmean_month_prism = np.zeros((Nmon, Nlat-1, Nlon-1))  # [672, 620, 1404]
    for t in range(Nmon):
        for i in range(Nlat-1):
            for j in range(Nlon-1):
                prmean_month_prism[t,i,j] = 0.25*(prmean_month[t,i,j]+prmean_month[t,i+1,j]
                                          +prmean_month[t,i,j+1]+prmean_month[t,i+1,j+1])
    
    prmean_month_prism[prmean_month_prism<0] = 0
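    # The triple loop above averages each 2x2 block of PRISM cells onto the
    # half-cell-offset grid; a vectorized sketch of the same computation using
    # the prmean_month array built above:
    prmean_month_prism_vec = 0.25*(prmean_month[:, :-1, :-1] + prmean_month[:, 1:, :-1]
                                   + prmean_month[:, :-1, 1:] + prmean_month[:, 1:, 1:])
    prmean_month_prism_vec[prmean_month_prism_vec < 0] = 0
    # prmean_month_prism_vec matches the prmean_month_prism array computed by the loop
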
def readGCM():
    '''
    generate whole USA map, saved on 12/11/2019
    read and process NASA GCM data over the USA
    '''
    import numpy as np
    from netCDF4 import Dataset
    from ncdump import ncdump
    from os import listdir
    from os.path import isfile, join
    import os

    variable = 'tasmin'  #'tasmax' #['pr' 'tas' 'tasmax' 'tasmin'] 'pr_37models'
    filepath = '/home/yumin/myProgramFiles/DATA/GCM/GCMdata/' + variable + '/raw/'
    savepath0 = '/home/yumin/myProgramFiles/DATA/GCM/GCMdata/' + variable + '/processeddata_26by59_points/'
    savepath = savepath0 + variable + '/'

    filenames = [f for f in listdir(filepath) if isfile(join(filepath, f))]
    #filenames = [filenames[0]]

    values_gcms = []
    # if not os.path.exists(savepath0):
    #     os.makedirs(savepath0)
    if savepath and not os.path.exists(savepath):
        os.makedirs(savepath)
    for kk, filename in enumerate(filenames):
        #filename = 'regridded_1deg_pr_amon_inmcm4_historical_r1i1p1_195001-200512.nc'
        dataset = Dataset(filepath + filename, mode='r')
        #dataset = Dataset(filepath+filename,mode='r',format="NETCDF3")
        #dataset = Dataset(filename,mode='r')
        if kk == 0:
            nc_attrs, nc_dims, nc_vars = ncdump(dataset)

        # original longitude is from 0.5 to 359.5 by 1, 360 points
        # original latitude is from -89.5 to 89.5 by 1, 180 points
        # whole USA longitude from [230.5E,304.5E] by 1, 75 points
        # whole USA latitude from [20.5N, 49.5N] by 1, 30 points
        # whole USA longitude from [235.5E,293.5E] by 1, 59 points
        # whole USA latitude from [24.5N, 49.5N] by 1, 26 points

        # original month from 195001 to 200512, 672 points
        # month from 200001 to 200412, 60 months

        time = dataset.variables['time'][:]  # 195001 - 200512
        #lats = dataset.variables['latitude'][110:140] # [20.5N, 49.5N]
        #lons = dataset.variables['longitude'][230:305] # [230.5E, 304.5E]
        lats = dataset.variables['latitude'][114:140]  # [24.5N, 49.5N]
        lons = dataset.variables['longitude'][235:294]  # [235.5E, 293.5E]
        #### whole USA
        ## monthly mean precipitation, unit: mm/day
        #value1_gcm = dataset.variables[variable][:,110:140,230:305]#[month,lat,lon] 195001-200512, totally
        value1_gcm = dataset.variables[
            variable][:, 114:140,
                      235:294]  #[month,lat,lon] 195001-200512, totally

        #value2_gcm = np.ma.filled(value1_gcm,-1.0e-8)
        value2_gcm = np.ma.filled(value1_gcm, 0)
        (Nmon, Nlat, Nlon) = value2_gcm.shape  # [672,26,59]
        value_gcm = np.zeros((Nmon, Nlat, Nlon))  # [672,26,59]
        for t in range(Nmon):
            value_gcm[t, :, :] = np.flipud(
                value2_gcm[t, :, :])  # lats from [49.5N,24.5N]
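        # A small equivalence sketch: the per-month flipud loop above is the
        # same as one reversed slice along the latitude axis.
        value_gcm_vec = value2_gcm[:, ::-1, :]  # lats from [49.5N, 24.5N]
        assert np.allclose(value_gcm_vec, value_gcm)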

        #### retangular USA
        ## monthly mean precipitation, unit: mm/day
        #precipitation = dataset.variables['pr'][600:660,123:140,245:278]#[month,lat,lon]
        #prmean_month_gcm = np.ma.filled(precipitation,np.nan)

        if np.isnan(np.sum(value_gcm)):
            print(filename + ' has NaN!\n')
            break
        savename = filename.replace('.nc', '_USA.npy')
        np.save(savepath + savename, value_gcm)
        values_gcms.append(value_gcm)

    values_gcms = np.stack(values_gcms, axis=0)  # [Ngcm,Nmon,Nlat,Nlon]
    print('values_gcms.shape={}'.format(values_gcms.shape))

    time = np.array(time)
    #### whole USA
    # latitude from [20.5N, 49.5N] by 1, 30 points
    # latitude from [24.5N, 49.5N] by 1, 26 points
    #lats_gcm1 = dataset.variables['latitude'][110:140]
    lats_gcm1 = dataset.variables['latitude'][114:140]
    lats_gcm = np.flipud(lats_gcm1)  # lats from [49.5N,24.5N]
    # longitude from [230.5E, 304.5E] by 1, 75 points
    # longitude from [235.5E, 293.5E] by 1, 59 points
    #lons_gcm = dataset.variables['longitude'][230:305]
    lons_gcm = dataset.variables['longitude'][235:294]
    lats_gcm = np.array(lats_gcm)
    lons_gcm = np.array(lons_gcm)
    np.save(savepath0 + 'time_gcm.npy', time)
    np.save(savepath0 + 'lats_gcm.npy', lats_gcm)
    np.save(savepath0 + 'lons_gcm.npy', lons_gcm)
    np.save(
        savepath0 + '{}gcms_{}_monthly_1by1_195001-200512_USA.npy'.format(
            len(filenames), variable), values_gcms)
    print('time=\n{}'.format(time))
    print('lats_gcm=\n{}'.format(lats_gcm))
    print('lons_gcm=\n{}'.format(lons_gcm))
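    # A sketch of how the hard-coded index ranges above (114:140 and 235:294)
    # can be derived from the coordinate values instead of being typed in:
    lat_all = dataset.variables['latitude'][:]   # -89.5 .. 89.5 by 1
    lon_all = dataset.variables['longitude'][:]  #  0.5 .. 359.5 by 1
    i0, i1 = np.searchsorted(lat_all, [24.5, 49.5 + 1e-6])    # -> 114, 140
    j0, j1 = np.searchsorted(lon_all, [235.5, 293.5 + 1e-6])  # -> 235, 294
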
def readPRISM():
    '''
    read the .nc format PRISM data produced by the zip_to_nc() function and align it with the GCM grid
    '''
    import numpy as np
    from netCDF4 import Dataset
    import os
    from ncdump import ncdump

    variable = 'tmin'  #'tmax'
    #filepath = '/home/yumin/Desktop/DS/DATA/PRISM/monthly/ppt/PRISM_ppt_stable_4kmM3_1950-2005_monthly_total.nc'
    #savepath = '../data/Climate/PRISMdata/processeddata/prism_prmean_monthly_0.04by0.04_original_grid/'
    filepath = '/home/yumin/Desktop/DS/DATA/PRISM/monthly/{}/PRISM_{}_stable_4kmM3_1950-2005_monthly_total.nc'.format(
        variable, variable)
    savepath = '/home/yumin/Desktop/DS/DATA/PRISM/PRISMdata/{}/processeddata/prism_{}_monthly_0.04by0.04_original_grid/'.format(
        variable, variable)
    ##%%
    dataset = Dataset(filepath, mode='r')
    nc_attrs, nc_dims, nc_vars = ncdump(dataset)

    # original latitude from 49.9375N to 24.1042N by 1/24 (~0.04), 621 points
    # latitude from N to N by ,  points
    lats_o = dataset.variables['lat'][:]
    # original longitude from -125.021 to -66.5208 by 1/24 (~0.04), 1405 points
    # original longitude from 234.979E to 293.479E by 1/24 (~0.04), 1405 points
    lons_o = dataset.variables['lon'][:] + 360
    # time from 195001 to 200512 by 1, 672 points
    time = dataset.variables['time']  #
    time = list(time)

    variable_o = dataset.variables[
        variable][:]  # temperature, [month,lat,lon], unit: degrees Celsius(?)
    variable_month = np.ma.filled(
        variable_o, 0)  #/30.0  # for precipitation this would give the daily mean, unit: mm/day
    #variable_month = variable_month[0:5,:,:]
    #import matplotlib.pyplot as plt
    #fig = plt.figure()
    #plt.imshow(variable_month[1,:,:])

    ##%% align with GCM grid
    ## latitude from [49.9167N,24.125N] by 1/24 (~0.04), 620 points
    ## longitude from [235E,293.458E] by 1/24 (~0.04), 1404 points
    lats = np.zeros(len(lats_o) - 1, )
    lons = np.zeros(len(lons_o) - 1, )
    for i in range(len(lats_o) - 1):
        lats[i] = 0.5 * (lats_o[i] + lats_o[i + 1])
    for i in range(len(lons_o) - 1):
        lons[i] = 0.5 * (lons_o[i] + lons_o[i + 1])
    (Nmon, Nlat, Nlon) = variable_month.shape
    variable_month_prism = np.zeros(
        (Nmon, Nlat - 1, Nlon - 1))  # [672, 620, 1404]
    for t in range(Nmon):
        for i in range(Nlat - 1):
            for j in range(Nlon - 1):
                variable_month_prism[t, i, j] = 0.25 * (
                    variable_month[t, i, j] + variable_month[t, i + 1, j] +
                    variable_month[t, i, j + 1] +
                    variable_month[t, i + 1, j + 1])

    variable_month_prism[variable_month_prism == 0] = 0  # no-op here; the precipitation version clamps values < 0 instead
    #if np.isnan(np.sum(prmean_month_prism)):
    #    print("Error! nan found!\n")

    ##%%
    if savepath and not os.path.exists(savepath):
        os.makedirs(savepath)
    np.save(savepath + 'lons_prism.npy', lons)
    np.save(savepath + 'lats_prism.npy', lats)
    np.save(savepath + 'time_prism.npy', time)
    np.save(
        savepath + 'prism_{}_monthly_195001-200512_USA.npy'.format(variable),
        variable_month_prism)
    print('time=\n{}'.format(time))
    print('lats=\n{}'.format(lats))
    print('lons=\n{}'.format(lons))
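    # A quick verification sketch: reload the array saved above and confirm
    # that its shape matches the aligned grid (paths exactly as defined above).
    check = np.load(savepath + 'prism_{}_monthly_195001-200512_USA.npy'.format(variable))
    print('saved array shape: {}  (expected ({}, {}, {}))'.format(check.shape, Nmon, Nlat - 1, Nlon - 1))
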
def readElevation():
    #### read the PRISM elevation data and align it with the GCM grid
    import numpy as np
    from netCDF4 import Dataset
    import os
    from ncdump import ncdump

    #%%
    #prefix = '/home/yumin/Desktop/DS/'
    #prefix = '/scratch/wang.zife/YuminLiu/'
    #datapath = prefix+'myPythonFiles/Downscaling/data/Climate/PRISMdata/raw/wcs_4km_prism.nc'
    datapath = '/home/yumin/myProgramFiles/DATA/PRISM/PRISMdata/elevation/raw/wcs_4km_prism.nc'
    savepath = '/home/yumin/myProgramFiles/DATA/PRISM/PRISMdata/elevation/processeddata/'  #None
    dataset = Dataset(datapath, mode='r')
    nc_attrs, nc_dims, nc_vars = ncdump(dataset)

    ## original latitude from [49.9375N, 24.1041N], by 1/24 (~0.04), 621 points
    ## original longitude from [-125.021, -66.521], by 1/24 (~0.04), 1405 points
    ## original longitude from [234.979E, 293.479E], by 1/24 (~0.04), 1405 points
    lats_o = dataset.variables['lat'][:]
    lons_o = dataset.variables['lon'][:] + 360
    elevation_o = dataset.variables['Band1'][:]
    print('elevation_o.min()={}, elevation_o.max()={}'.format(
        elevation_o.min(), elevation_o.max()))
    #import matplotlib.pyplot as plt
    #fig = plt.figure()
    #plt.imshow(elevation_o)
    #plt.show()

    #%% align with GCM grid
    ## latitude from [49.9167N,24.125N] by 1/24 (~0.04), 620 points
    ## longitude from [235E,293.458E] by 1/24 (~0.04), 1404 points
    lats = np.zeros(len(lats_o) - 1, )
    lons = np.zeros(len(lons_o) - 1, )
    for i in range(len(lats_o) - 1):
        lats[i] = 0.5 * (lats_o[i] + lats_o[i + 1])
    for i in range(len(lons_o) - 1):
        lons[i] = 0.5 * (lons_o[i] + lons_o[i + 1])
    (Nlat, Nlon) = elevation_o.shape
    elevation = np.zeros((Nlat - 1, Nlon - 1))  # [620,1404]
    for i in range(Nlat - 1):
        for j in range(Nlon - 1):
            elevation[i,
                      j] = 0.25 * (elevation_o[i, j] + elevation_o[i + 1, j] +
                                   elevation_o[i, j + 1] +
                                   elevation_o[i + 1, j + 1])
    if np.isnan(np.sum(elevation)):
        print("Error! nan found!\n")
    print('elevation.min()={}, elevation.max()={}'.format(
        elevation.min(), elevation.max()))

    #%%
    #if savepath and not os.path.exists(savepath):
    #    os.makedirs(savepath)
    #np.save(savepath+'lons_prism.npy',lons)
    #np.save(savepath+'lats_prism.npy',lats)
    #np.save(savepath+'prism_elevation_USA.npy',elevation)
    #import matplotlib.pyplot as plt
    #fig = plt.figure()
    #plt.imshow(elevation)
    #plt.show()

    #%%
    def savedata(savepath, savename, lats, lons, data):
        if savepath and not os.path.exists(savepath):
            os.makedirs(savepath)
        #np.save(savepath+'lons_prism.npy',lons)
        #np.save(savepath+'lats_prism.npy',lats)
        np.save(savepath + 'prism_elevation_{}_USA.npy'.format(savename), data)

    ## latitude from [49.5N,24.5N]
    ## longitude from [235.5E,293.5E]
    lats_24 = lats[10:611]
    lons_24 = lons[12:]
    elevation_24 = elevation[10:611, 12:]  # [lat,lon]
    #elevation_24[elevation_24<0] = 0.0
    #savedata(savepath+'prism_prmean_monthly_0.04by0.04/','0.04by0.04',lats_24,lons_24,elevation_24)
    savedata(savepath, '0.04by0.04', lats_24, lons_24, elevation_24)
    import matplotlib.pyplot as plt
    fig = plt.figure()
    plt.imshow(elevation_24)
    #plt.savefig(savepath+'prism_prmean_monthly_0.04by0.04/elevation_prism_24.png',dpi=1200,bbox_inches='tight')
    plt.savefig(savepath + 'elevation_prism_24.png',
                dpi=1200,
                bbox_inches='tight')
    #plt.show()

    ## GCM is 26 by 59
    ## 8x is 208 by 472
    ## 4x is 104 by 236
    ## 2x is 52 by 118
    from skimage.transform import resize
    lats_8 = np.linspace(lats_24[0], lats_24[-1], num=208)
    lons_8 = np.linspace(lons_24[0], lons_24[-1], num=472)
    elevation_8 = resize(elevation_24, (208, 472),
                         order=1,
                         preserve_range=True)
    #elevation_8[elevation_8<0] = 0.0
    #savedata(savepath+'prism_prmean_monthly_0.125by0.125/','0.125by0.125',lats_8,lons_8,elevation_8)
    savedata(savepath, '0.125by0.125', lats_8, lons_8, elevation_8)
    #import matplotlib.pyplot as plt
    fig = plt.figure()
    plt.imshow(elevation_8)
    #plt.savefig(savepath+'prism_prmean_monthly_0.125by0.125/elevation_prism_8.png',dpi=1200,bbox_inches='tight')
    plt.savefig(savepath + 'elevation_prism_8.png',
                dpi=1200,
                bbox_inches='tight')
    #plt.show()

    lats_4 = np.linspace(lats_24[0], lats_24[-1], num=104)
    lons_4 = np.linspace(lons_24[0], lons_24[-1], num=236)
    elevation_4 = resize(elevation_24, (104, 236),
                         order=1,
                         preserve_range=True)
    #elevation_4[elevation_4<0] = 0.0
    #savedata(savepath+'prism_prmean_monthly_0.25by0.25/','0.25by0.25',lats_4,lons_4,elevation_4)
    savedata(savepath, '0.25by0.25', lats_4, lons_4, elevation_4)
    #import matplotlib.pyplot as plt
    fig = plt.figure()
    plt.imshow(elevation_4)
    #plt.savefig(savepath+'prism_prmean_monthly_0.25by0.25/elevation_prism_4.png',dpi=1200,bbox_inches='tight')
    plt.savefig(savepath + 'elevation_prism_4.png',
                dpi=1200,
                bbox_inches='tight')
    #plt.show()

    lats_2 = np.linspace(lats_24[0], lats_24[-1], num=52)
    lons_2 = np.linspace(lons_24[0], lons_24[-1], num=118)
    elevation_2 = resize(elevation_24, (52, 118), order=1, preserve_range=True)
    #elevation_2[elevation_2<0] = 0.0
    #savedata(savepath+'prism_prmean_monthly_0.5by0.5/','0.5by0.5',lats_2,lons_2,elevation_2)
    savedata(savepath, '0.5by0.5', lats_2, lons_2, elevation_2)
    #import matplotlib.pyplot as plt
    fig = plt.figure()
    plt.imshow(elevation_2)
    #plt.savefig(savepath+'prism_prmean_monthly_0.5by0.5/elevation_prism_2.png',dpi=1200,bbox_inches='tight')
    plt.savefig(savepath + 'elevation_prism_2.png',
                dpi=1200,
                bbox_inches='tight')
    #plt.show()

    lats_1 = np.linspace(lats_24[0], lats_24[-1], num=26)
    lons_1 = np.linspace(lons_24[0], lons_24[-1], num=59)
    elevation_1 = resize(elevation_24, (26, 59), order=1, preserve_range=True)
    #elevation_1[elevation_1<0] = 0.0
    #savedata(savepath+'prism_prmean_monthly_1.0by1.0/','1.0by1.0',lats_1,lons_1,elevation_1)
    savedata(savepath, '1.0by1.0', lats_1, lons_1, elevation_1)
    #import matplotlib.pyplot as plt
    fig = plt.figure()
    plt.imshow(elevation_1)
    #plt.savefig(savepath+'prism_prmean_monthly_1.0by1.0/elevation_prism_1.png',dpi=1200,bbox_inches='tight')
    plt.savefig(savepath + 'elevation_prism_1.png',
                dpi=1200,
                bbox_inches='tight')
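    # The four resize/save/plot blocks above repeat one pattern; an equivalent
    # compact sketch (commented out, names exactly as defined above):
    # for num_lat, num_lon, tag in [(208, 472, '0.125by0.125'), (104, 236, '0.25by0.25'),
    #                               (52, 118, '0.5by0.5'), (26, 59, '1.0by1.0')]:
    #     elev = resize(elevation_24, (num_lat, num_lon), order=1, preserve_range=True)
    #     savedata(savepath, tag,
    #              np.linspace(lats_24[0], lats_24[-1], num=num_lat),
    #              np.linspace(lons_24[0], lons_24[-1], num=num_lon), elev)
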
    lon = dataset.variables['lon'][:]
    plev = dataset.variables['plev'][:]
    time = dataset.variables['time'][:]
    cli = dataset.variables['cli'][:]

    ax = plt.subplot(splt_param)
    ax.set_title(title)
    plt.xlabel("Pressure (hPa)")
    plt.ylabel("Cloud ice level (kg/kg)")

    # cli_profile holds one averaged value per pressure level
    cli_profile = []
    # mean over all remaining dimensions at each level
    for i in range(plev.size):
        cli_profile.append(np.mean(cli[i]))
    plt.gca().invert_yaxis()
    ax.plot(cli_profile, plev, marker='o')
    

nc3 = Dataset("GFDL_am3_cli_2009_01.nc", "r", format="NETCDF3_CLASSIC")
nc4 = Dataset("GFDL_am4_cli_2009_01.nc", "r", format="NETCDF3_CLASSIC")
ncdump.ncdump(nc3)
ncdump.ncdump(nc4)

plot_cli_profile(nc3, 121, "AM3 CLI profile")
plot_cli_profile(nc4, 122, "AM4 CLI profile")

fig.tight_layout()

plt.show()
Example #8
            vmin = lo_cli, vmax = hi_cli)
    """
    # per-map scale
    disp = ax.contourf(lon, lat, cli[ind], cmap=cmap, levels=8)

    cbar = plt.colorbar(disp, ax=ax, format='%.0e')
    cbar.set_label("cli (kg/kg)")
    return disp


fig.suptitle("Cli at Different Altitudes")

am3 = Dataset("GFDL_am3_cli_2009_01.nc", "r", format="NETCDF3_CLASSIC")
am4 = Dataset("GFDL_am4_cli_2009_01.nc", "r", format="NETCDF3_CLASSIC")

ncdump.ncdump(am3)
ncdump.ncdump(am4)

pre_process(am3.variables['cli'])
pre_process(am4.variables['cli'])

plot_for_cb = \
plot_lat_lon(am3, 321, "am3 150 plev", 150)
plot_lat_lon(am3, 323, "am3 600 plev", 600)
plot_lat_lon(am3, 325, "am3 900 plev", 900)

plot_lat_lon(am4, 322, "am4 150 plev", 150)
plot_lat_lon(am4, 324, "am4 600 plev", 600)
plot_lat_lon(am4, 326, "am4 900 plev", 900)
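# A hypothetical sketch of the shared-scale variant hinted at by the commented
# "vmin = lo_cli, vmax = hi_cli" arguments above: pass the same limits to every
# contourf call and draw a single colorbar for the whole figure.
# disp = ax.contourf(lon, lat, cli[ind], cmap=cmap, levels=8, vmin=lo_cli, vmax=hi_cli)
# fig.colorbar(disp, ax=fig.axes, format='%.0e', label="cli (kg/kg)")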
"""
1 Bar for all plots
Example #9
        msg = msg_header
        print '\n**********************'
        print 'info:'
        print '%s  P-traces\n%s  SH-traces' %(len(all_p_data), len(all_sh_data))
        print '**********************'
        msg += '%s  P-traces\n%s  SH-traces\n' %(len(all_p_data), len(all_sh_data))
        msg += msg_p 
        for _i in xrange(len(all_p_data)): msg += all_p_data[_i][-1]
        msg += msg_sh 
        for _i in xrange(len(all_sh_data)): msg += all_sh_data[_i][-1]
        innastats_open = open(os.path.join(e_add.split('/')[-2], 
                                                'infiles', 'in.na.stats'), 'w')
        innastats_open.write(msg)
        innastats_open.close()
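        # An equivalent sketch using a context manager, which also closes the
        # file if write() raises (same path expression as above):
        # with open(os.path.join(e_add.split('/')[-2],
        #                        'infiles', 'in.na.stats'), 'w') as innastats_open:
        #     innastats_open.write(msg)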
       
        ncdump(os.path.join(e_add.split('/')[-2], 'infiles'), 
                            all_p_data, all_sh_data)
        
        if len(all_p_data) != 0 and len(all_sh_data) != 0: 
            if inp.map: mapper(all_p_data, all_sh_data, 
                                address=os.path.join(e_add.split('/')[-2], 'infiles')) 
            if inp.plot_azi: plot_azi(all_p_data, all_sh_data, 
                                address=os.path.join(e_add.split('/')[-2], 'infiles')) 

########################################################################
########################################################################
########################################################################

def main():
    t1_pro = time.time()
    status = PyNASTF()
    print "\n============================="
Example #10
        Is = range(np.shape(time)[0])
    dat = np.take(data, Is, axis=0)
    sig = np.take(signal, Is)
    alph = alphas(dat[:, mask], sig, mask)
    regr = np.tensordot(sig, alph, axes=0)
    residuals = dat - regr
    cont = contribution(dat, residuals, lats)
    if (R):
        return alph, cont, residuals, space_contribution(dat, residuals)
    else:
        return alph, cont


nc_fid = Dataset('ssta-ver4.nc', 'r')
#nc_li = Dataset('LImask.nc', 'r')
nc_attrs, nc_dims, nc_vars = nc.ncdump(nc_fid)
#mask_vars= nc.ncdump(nc_li)[2]
lats = nc_fid.variables['Y'][:]
lons = nc_fid.variables['X'][:]
time = nc_fid.variables['T'][:]
sst = nc_fid.variables['anom'][:]
#mask = nc_li.variables['mask'][:]
'''
sst=np.array(sst)
ssta=sst[:,0,:]
nt,nlat,nlon = np.shape(ssta)

mask = ~ma.masked_values (ssta[0,:], -999).mask
'''

co2 = np.loadtxt('co2.txt')[:, 5]
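# A small sketch of the regression step used above: np.tensordot with axes=0 is
# an outer product, so for a 1-D signal s and a coefficient array a it returns
# regr with regr[t, ...] = s[t] * a[...].
s_demo = np.arange(3.0)              # hypothetical signal, length 3
a_demo = np.ones((2, 4))             # hypothetical coefficient array
regr_demo = np.tensordot(s_demo, a_demo, axes=0)
assert regr_demo.shape == (3, 2, 4)  # shape is s_demo.shape + a_demo.shape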