Example #1
    def spatial_correlation(self,
                            field_A,
                            field_B,
                            method=None,
                            selection=None):
        """ correlate two 2D fields """
        if np.shape(field_A) != np.shape(field_B):  # have to regrid
            A, B = self.regrid_to_lower_resolution(field_A, field_B)
        else:
            A, B = field_A, field_B
        assert np.shape(A) == np.shape(B)
        domain = self.determine_domain(A)

        AREA = xr_AREA(domain)
        MASK = boolean_mask(domain=domain, mask_nr=0)
        if type(selection) == int:
            MASK = boolean_mask(domain=domain, mask_nr=selection)
        elif type(selection) == dict:
            MASK, AREA = MASK.sel(selection), AREA.sel(selection)
            A, B = A.sel(selection), B.sel(selection)

        D = np.any(np.array(
            [np.isnan(A).values,
             np.isnan(B).values, (MASK == 0).values]),
                   axis=0)
        A = xr.where(D, np.nan,
                     A).stack(z=('latitude', 'longitude')).dropna(dim='z')
        B = xr.where(D, np.nan,
                     B).stack(z=('latitude', 'longitude')).dropna(dim='z')
        C = xr.where(D, np.nan,
                     AREA).stack(z=('latitude', 'longitude')).dropna(dim='z')
        d = DescrStatsW(np.array([A.values, B.values]).T, weights=C)
        spatial_corr_coef = d.corrcoef[0, 1]

        return spatial_corr_coef
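Note: the area weighting relies on statsmodels' DescrStatsW, whose corrcoef property returns the weighted correlation matrix. A minimal self-contained sketch of the same idea on synthetic fields (all data here is made up):

    import numpy as np
    from statsmodels.stats.weightstats import DescrStatsW

    # two toy 2D fields on a 3x4 grid, flattened to 1D
    rng = np.random.default_rng(0)
    a = rng.standard_normal((3, 4)).ravel()
    b = a + 0.5 * rng.standard_normal(12)
    # grid-cell areas shrink towards the poles, roughly as cos(lat)
    area = np.cos(np.deg2rad(np.repeat([-30.0, 0.0, 30.0], 4)))

    d = DescrStatsW(np.column_stack([a, b]), weights=area)
    print(d.corrcoef[0, 1])  # area-weighted spatial correlation coefficient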
Example #2
    def SST_pointwise_detrending(self, run, tavg='yrly', degree=2, time=None):
        """ calculates the trends of ocean fields

        input:
        SST    .. xr DataArray
        degree .. degree of polynomial to remove

        output:
        SST_dt .. pointwise detrended SST field

        7 secs for lpd run, 40 seconds
        """
        print('detrending SST pointwise')
        assert degree in [1, 2]
        if run in ['ctrl', 'rcp']: MASK = boolean_mask('ocn', 0)
        elif run in ['lpi', 'lpd']: MASK = boolean_mask('ocn_low', 0)
        (jm, im) = MASK.shape
        fn = f'{path_prace}/SST/SST_{tavg}_{run}.nc'
        SST = self.select_time(
            xr.open_dataarray(fn, decode_times=False).where(MASK), time)
        SST = SST.where(MASK > 0).fillna(-9999)
        Nt = SST.values.shape[0]
        A = SST.values.reshape((Nt, im * jm))

        SST_pf = np.polyfit(SST.time, A, degree)

        pf0 = A[0, :].copy()
        pf1 = A[0, :].copy()
        pf0 = SST_pf[0, :]
        pf1 = SST_pf[1, :]
        if degree == 1:
            # SST_dt = pf0*SST.time - pf1
            detrend_signal = 'linear'
        elif degree == 2:
            pf2 = A[0, :].copy()
            pf2 = SST_pf[2, :]
            A_dt = np.expand_dims(SST.time**2             , 1).dot(np.expand_dims(SST_pf[0,:], 0)) \
                 + np.expand_dims(SST.time                , 1).dot(np.expand_dims(SST_pf[1,:], 0)) \
                 + np.expand_dims(np.ones((len(SST.time))), 1).dot(np.expand_dims(SST_pf[2,:], 0))
            # detrend_signal = 'quadratic'
        dt = 'pwdt'  # pointwise detrended

        fn_new = f'{path_prace}/SST/SST_yrly_{dt}_{run}_{time[0]}_{time[1]}.nc'
        SST_dt = SST.copy()
        SST_dt.values = (A - A_dt).reshape((Nt, jm, im))
        SST_dt.to_netcdf(fn_new)
        print(f'created {fn_new}')
        return
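The reshape-then-polyfit trick above is the core of the method: np.polyfit accepts a 2D right-hand side and fits every column (grid point) in one call. A small sketch on made-up data:

    import numpy as np

    t = np.arange(100, dtype=float)
    field = np.random.default_rng(1).standard_normal((100, 5, 6))
    field += (0.01 * t**2 + 0.1 * t)[:, None, None]  # common quadratic trend

    A = field.reshape(100, -1)          # (time, space) matrix
    coefs = np.polyfit(t, A, 2)         # one fit per column, shape (3, 30)
    trend = np.vander(t, 3) @ coefs     # columns [t^2, t, 1] match polyfit's order
    detrended = (A - trend).reshape(field.shape)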
Example #3
 def all_advection_cells(self, domain):
     fn = f'{path_prace}/OHC/advection_cells_{domain}.nc'
     if os.path.exists(fn):  # reuse cached file
         adv = xr.open_dataset(fn)
     else:  # compute advection cells between all neighbouring basins, then cache
         for i, pair in enumerate(tqdm(neighbours)):
             from_basin_mask = boolean_mask(domain, pair[0])
             to_basin_mask = boolean_mask(domain, pair[1])
             name = f'{regions_dict[pair[0]]}_to_{regions_dict[pair[1]]}'
             adv_E, adv_N = self.advection_cells(from_basin_mask,
                                                 to_basin_mask)
             adv_E.name = f'adv_E_{name}'
             adv_N.name = f'adv_N_{name}'
             if i == 0: adv = xr.merge([adv_N, adv_E])
             else: adv = xr.merge([adv, adv_N, adv_E])
         adv.to_netcdf(fn)
     return adv
Example #4
def ocn_field_regression(xa, run):
    """ calculates the trends of ocean fields
    
    input:
    xa      .. xr DataArray
    
    output:
    da_trend .. 2D xr DataArray with linear trends
    
    (takes about 40 seconds)
    """
    print(f'started at\n{datetime.datetime.now()}')

    assert type(xa) == xr.core.dataarray.DataArray
    assert len(xa.values.shape) == 3
    # assert xa.values.shape[1:]==(jmt,imt)

    if run in ['ctrl', 'rcp']:
        MASK = boolean_mask('ocn', 0)
    elif run in ['lpi', 'lpd', 'lc1', 'lr1', 'lr2', 'ld']:
        MASK = boolean_mask('ocn_low', 0)
    (jm, im) = MASK.shape
    xa = xa.where(MASK > 0).fillna(-9999)  # dummy value on land so polyfit sees no NaNs

    xa_slope = xa[0, :, :].copy()
    xa_interc = xa[0, :, :].copy()
    Nt = xa.values.shape[0]
    A = xa.values.reshape((Nt, im * jm))

    xa_lin = np.polyfit(xa.time, A, 1)
    xa_slope.values = xa_lin[0, :].reshape((jm, im))  # slope; [xa unit/time]
    xa_slope = xa_slope.where(MASK > 0)

    xa_interc.values = xa_lin[1, :].reshape((jm, im))  # intercept; [xa unit]
    xa_interc = xa_interc.where(MASK > 0)

    return xa_slope, xa_interc
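For reference, recent xarray versions offer DataArray.polyfit, which performs the same per-gridpoint least-squares fit without the manual reshape; a sketch on a made-up array:

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.random.rand(20, 4, 5),
                      dims=['time', 'nlat', 'nlon'],
                      coords={'time': np.arange(20)})
    fit = da.polyfit(dim='time', deg=1)                 # least squares per grid point
    slope = fit.polyfit_coefficients.sel(degree=1)      # [unit/time]
    intercept = fit.polyfit_coefficients.sel(degree=0)  # [unit]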
Example #5
 def plot_OHC_anomaly(self):
     """"""
     ctrl_qd = xr.open_dataset(f'{path_samoc}/OHC/OHC_integrals_ctrl_qd.nc', decode_times=False)
     lpd_qd  = xr.open_dataset(f'{path_samoc}/OHC/OHC_integrals_lpd_qd.nc' , decode_times=False)
     
     maxv = []
     for j, depths in enumerate([(0,6000), (0,100), (0,700), (700,2000)]):
         key = f'OHC_vertical_{depths[0]}_{depths[1]}m'
         maxv.append(np.max([np.abs(ctrl_qd[key]).max(), np.abs(lpd_qd[key]).max()])/4)
     print(maxv)
     
     for y in range(250):
         f, ax = plt.subplots(4, 3, figsize=(10,10),
                              gridspec_kw={"width_ratios":[1,1, 0.05]}, 
                              subplot_kw=dict(projection=ccrs.EqualEarth(central_longitude=300)))
         for i, ds in enumerate([ctrl_qd, lpd_qd]):
             name = ['CTRL', 'LPD'][i]
             MASK = boolean_mask(['ocn_rect', 'ocn_low'][i], mask_nr=0)
             if i==0:   X, Y = np.meshgrid(ds.t_lon, ds.t_lat)
             else:      X, Y = ds.TLONG, ds.TLAT
             for j, depths in enumerate([(0,6000), (0,100), (0,700), (700,2000)]):
                 key = f'OHC_vertical_{depths[0]}_{depths[1]}m'
                 im = ax[j,i].pcolormesh(X, Y, ds[key][y,:,:].where(MASK),
                                         transform=ccrs.PlateCarree(),
                                         vmin=-maxv[j], vmax=maxv[j],
                                         cmap=cmocean.cm.balance)
                 ax[j,i].add_feature(cartopy.feature.LAND,
                                     zorder=2, edgecolor='black', facecolor='w')
                 if j==0:
                     year = f'{ds.time.values[y]:3.0f}'
                     ax[0,i].text(.5, 1.1, f'{name} (year {year})',
                                  transform=ax[0,i].transAxes, ha='center')
                 ax[j,i].text(.5, 1.02, ['full depth (0-6000m)', 'surface (0-100m)',
                                         'upper ocean (0-700m)', 'lower ocean (700-2000m)'][j],
                              transform=ax[j,i].transAxes, ha='center')
                 if i==1:
                     cb = f.colorbar(im, ax=ax[j,2], orientation='vertical', label=r'OHC [J/m$^{2}$]')
                     cb.outline.set_visible(False)
         plt.savefig(f'{path_results}/OHC/OHC-video/OHC_vert_qd_ctrl_lpd_{y:03d}')
         plt.close(f)  # free memory between the 250 frames
     return
Example #6
 def surface_heat_flux(self, run):
     """ total surface heat flux into ocean basins """
     # 32:20 min ctrl
     # 1min 4s lpd
     if run == 'ctrl': domain = 'ocn'
     elif run in ['lc1', 'lpd']: domain = 'ocn_low'
     da = xr.open_mfdataset(f'{path_prace}/{run}/ocn_yrly_SHF_0*.nc',
                            combine='nested',
                            concat_dim='time').SHF
     print(len(da.time))
     AREA = xr_AREA(domain=domain)
     SHF = spy * (da * AREA).sum(dim=['nlat', 'nlon'])  # [W/m^2]*[m^2]*[s/yr] -> [J/yr]
     SHF.name = 'Global_Ocean'
     for nr in tqdm(np.arange(1, 12)):
         MASK = boolean_mask(domain=domain, mask_nr=nr)
         temp = spy * (da * AREA).where(MASK).sum(dim=['nlat', 'nlon'])
         temp.name = regions_dict[nr]
         SHF = xr.merge([SHF, temp])
     SHF.attrs['quantity'] = 'yrly averaged total surface heat flux, positive down'
     SHF.attrs['units'] = '[J/yr]'
     SHF.to_netcdf(f'{path_prace}/OHC/SHF_{run}.nc')
     return
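The spy factor (seconds per year) converts the instantaneous flux integral to an annual energy flux; a quick unit check with illustrative round numbers:

    spy = 3600 * 24 * 365  # seconds per year
    flux = 150.0           # surface heat flux [W/m^2], a typical open-ocean value
    cell_area = 1.0e9      # [m^2], order of a ~30 km x 30 km grid cell
    # [W/m^2] * [m^2] -> [W]; multiplying by [s/yr] gives [J/yr]
    print(flux * cell_area * spy)  # ~4.7e18 J/yr for this single cell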
Example #7
    def generate_OHC_files(self, run, year=None, pwqd=False):
        """ non-detrended OHC files for full length of simulations
        
        One file contains integrals (all global and by basin):
        
        x,y,z .. scalars        
        x,y   .. vertical profiles 
        x     .. "zonal" integrals 
        
        A separate file each for 4 different depth levels
        z     .. 2D maps, global only, but for different vertical levels
 
        # (ocn:      takes about 45 seconds per year: 70 yrs approx 55 mins)
        (ocn:      takes about 14 min per year)
        (ocn_rect: takes about  3 seconds per year: 70 yrs approx 3 mins)
        """
        
        def t2da(da, t):
            """adds time dimension to xr DataArray, then sets time value to t"""
            da = da.expand_dims('time')
            da = da.assign_coords(time=[t])
            return da

        def t2ds(da, name, t):
            """ 
            adds time dimension to xr DataArray, then sets time value to t,
            and then returns as array in xr dataset
            """
            da = t2da(da, t)
            ds = da.to_dataset(name=name)
            return ds
        start = datetime.datetime.now()
        def tss():  # time since start
            return datetime.datetime.now()-start
        print(f'{start}  start OHC calculation: run={run}')
        assert run in ['ctrl', 'rcp', 'lpd', 'lpi']

        if run=='rcp':
            domain = 'ocn'
        elif run=='ctrl':
            domain = 'ocn_rect'
        elif run in ['lpd', 'lpi']:
            domain = 'ocn_low'
            
        (z, lat, lon) = dll_dims_names(domain)

        # geometry
        DZT  = xr_DZ(domain)
        AREA = xr_AREA(domain)
        HTN  = xr_HTN(domain)
        LATS = xr_LATS(domain)
        
        def round_tlatlon(das):
            """ rounds TLAT and TLONG to 2 decimals
            some files' coordinates differ in their last digit;
            rounding them avoids problems in concatenating
            """
            das['TLAT']   = das['TLAT'].round(decimals=2)
            das['TLONG']  = das['TLONG'].round(decimals=2)
            return das
        if domain=='ocn':
            round_tlatlon(HTN)
            round_tlatlon(LATS)

        MASK = boolean_mask(domain, mask_nr=0)
        DZT  = DZT.where(MASK)
        # note: chunking DZT was tried but dropped; for ocn_rect it took
        # 21 sec per iteration with chunks, 15 sec without
        AREA = AREA.where(MASK)
        HTN  = HTN.where(MASK)
        LATS = LATS.where(MASK)
        
        if pwqd:  name = 'TEMP_pwqd'
        else:     name = 'TEMP_PD'
            

        for y,m,file in IterateOutputCESM(domain=domain, run=run, tavg='yrly', name=name):
            if year is not None and y != year:  # optionally process a single year only
                continue
                    
            if pwqd:  file_out = f'{path_samoc}/OHC/OHC_integrals_{run}_{y:04d}_pwqd.nc'
            else:     file_out = f'{path_samoc}/OHC/OHC_integrals_{run}_{y:04d}.nc'
                

            if os.path.exists(file_out) and year is None:
                # TODO: could check here whether all fields already exist in the file
                continue
            print(f'{tss()} {y}, {file}')

            t   = y*365  # time in days since year 0, for consistency with CESM date output
            ds  = xr.open_dataset(file, decode_times=False).TEMP
            print(f'{tss()} opened dataset')
            if domain=='ocn':
                ds = ds.drop(['ULONG', 'ULAT'])
                ds = round_tlatlon(ds)

            # OHC density [J/m^3]: potential temperature times reference density and heat capacity
            OHC = ds*rho_sw*cp_sw
            ds.close()
            OHC = OHC.where(MASK)

            OHC_DZT = OHC*DZT
            print(f'{tss()}  {y} calculated OHC & OHC_DZT')
            
            # global, global levels, zonal, zonal levels integrals for different regions
            for mask_nr in tqdm([0,1,2,3,6,7,8,9,10]):
                name = regions_dict[mask_nr]
                da = OHC.where(boolean_mask(domain, mask_nr=mask_nr))
                
                da_g = (da*AREA*DZT).sum(dim=[z, lat, lon])
                da_g.attrs['units'] = '[J]'
                ds_g  = t2ds(da_g , f'OHC_{name}', t)

                da_gl = (da*AREA).sum(dim=[lat, lon])
                da_gl.attrs['units'] = '[J m^-1]'
                ds_gl = t2ds(da_gl, f'OHC_levels_{name}', t)

                if domain=='ocn':  da_z  = xr_int_zonal(da=da, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
                else:  da_z = (da*HTN*DZT).sum(dim=[z, lon])
                da_z.attrs['units'] = '[J m^-1]'
                ds_z = t2ds(da_z , f'OHC_zonal_{name}', t)
                
                if domain=='ocn':  da_zl = xr_int_zonal_level(da=da, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
                else:  da_zl = (da*HTN).sum(dim=[lon])
                da_zl.attrs['units'] = '[J m^-2]'
                ds_zl = t2ds(da_zl, f'OHC_zonal_levels_{name}', t)
                if mask_nr==0:   ds_new = xr.merge([ds_g, ds_gl, ds_z, ds_zl])
                else:            ds_new = xr.merge([ds_new, ds_g, ds_gl, ds_z, ds_zl])
                    
            print(f'{tss()}  done with horizontal calculations')
            
            # vertical integrals
            # full depth
            da_v  = OHC_DZT.sum(dim=z)                         #   0-6000 m
            da_v.attrs = {'depths':f'{OHC_DZT[z][0].values:.0f}-{OHC_DZT[z][-1].values:.0f}',
                          'units':'[J m^-2]'}
            
            # index ranges corresponding to 0-100 m, 0-700 m, and 700-2000 m
            if domain in ['ocn', 'ocn_rect']:  zsel = [[0,9], [0,20], [20,26]]
            elif domain=='ocn_low':            zsel = [[0,9], [0,36], [36,45]]
            
            #   0- 100 m
            da_va = OHC_DZT.isel({z:slice(zsel[0][0], zsel[0][1])}).sum(dim=z)  
            da_va.attrs = {'depths':f'{OHC_DZT[z][zsel[0][0]].values:.0f}-{OHC_DZT[z][zsel[0][1]].values:.0f}',
                           'units':'[J m^-2]'}
            
            #   0- 700 m
            da_vb = OHC_DZT.isel({z:slice(zsel[1][0],zsel[1][1])}).sum(dim=z)  
            da_vb.attrs = {'depths':f'{OHC_DZT[z][zsel[1][0]].values:.0f}-{OHC_DZT[z][zsel[1][1]].values:.0f}',
                           'units':'[J m^-2]'}
            
            # 700-2000 m
            da_vc = OHC_DZT.isel({z:slice(zsel[2][0],zsel[2][1])}).sum(dim=z)  
            da_vc.attrs = {'depths':f'{OHC_DZT[z][zsel[2][0]].values:.0f}-{OHC_DZT[z][zsel[2][1]].values:.0f}',
                           'units':'[J m^-2]'}
            
            ds_v  = t2ds(da_v , 'OHC_vertical_0_6000m'  , t)
            ds_va = t2ds(da_va, 'OHC_vertical_0_100m'   , t)
            ds_vb = t2ds(da_vb, 'OHC_vertical_0_700m'   , t)
            ds_vc = t2ds(da_vc, 'OHC_vertical_700_2000m', t)

            ds_new = xr.merge([ds_new, ds_v, ds_va, ds_vb, ds_vc])

            print(f'{tss()}  done making datasets')
            ds_new.to_netcdf(path=file_out, mode='w')
            ds_new.close()


        # combining yearly files: see the sketch after this function
        
        print(f'{datetime.datetime.now()}  done\n')
        
        # note: for run=='ctrl', year 205 is wrong and should be averaged
        # by executing `fix_ctrl_year_205()`
        return
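The "combining yearly files" step is left unimplemented above; a possible sketch using xarray's multi-file open, assuming `run` and `path_samoc` as in the function and the per-year file names written above:

    import xarray as xr

    combined = xr.open_mfdataset(f'{path_samoc}/OHC/OHC_integrals_{run}_*.nc',
                                 combine='by_coords', decode_times=False)
    combined.to_netcdf(f'{path_samoc}/OHC/OHC_integrals_{run}.nc')  # hypothetical output name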
Example #8
    def SST_remove_forced_signal(self,
                                 run,
                                 tavg='yrly',
                                 detrend_signal='GMST',
                                 time=None):
        """ detrending the SST field
        a) remove the scaled, forced MMEM GMST signal (method by Kajtar et al. (2019)) at each grid point
        b) remove MMEM SST index (Steinman et al. (2015))

        1. load raw SST data
        2. generate forced signal
            model:  fit to GMST
                linear
                quadratic
            observations:
                single-factor CMIP GMST MMEM
                two-factor CMIP all natural and CMIP anthropogenic (= all forcings - all natural)
        3. regression:
            single time series: forced signal onto SST data -> \beta
            two time series:
        4. use regression coefficient \beta to generate SST signal due to forcing
        5. remove that signal

        run            .. CESM simulation name
        tavg           .. time resolution
        detrend_signal .. either GMST (Kajtar et al. (2019))
                          or target region (Steinman et al. (2015))
        time           .. time range selected
        """
        assert run in ['ctrl', 'rcp', 'lpd', 'lpi', 'had']
        assert tavg in ['yrly', 'monthly']
        assert detrend_signal in ['GMST', 'AMO', 'SOM', 'TPI1', 'TPI2', 'TPI3']
        if detrend_signal in ['AMO', 'SOM', 'TPI1', 'TPI2', 'TPI3']:
            assert run == 'had'
        if run == 'had':
            assert time is None

        # file name and domain
        fn = f'{path_prace}/SST/SST_{tavg}_{run}.nc'
        if run in ['ctrl', 'rcp']:
            if tavg == 'yrly':
                domain = 'ocn'
            elif tavg == 'monthly':
                domain = 'ocn_rect'
        elif run in ['lpd', 'lpi']:
            domain = 'ocn_low'
        elif run == 'had':
            domain = 'ocn_had'

        print('load and subselect data')
        MASK = boolean_mask(domain=domain, mask_nr=0, rounded=True)
        SST = self.select_time(xr.open_dataarray(fn, decode_times=False).where(MASK),
                               time)

        if time is not None:
            first_year, last_year = time

        if tavg == 'monthly':  # deseasonalize
            for t in range(12):
                SST[t::12, :, :] -= SST[t::12, :, :].mean(dim='time')
        SST = SST - SST.mean(dim='time')

        print('calculate forced signal')
        forced_signal = self.forcing_signal(run=run,
                                            tavg=tavg,
                                            detrend_signal=detrend_signal,
                                            time=time)

        if detrend_signal == 'GMST':
            print('Kajtar et al. (2019) scaled MMM GMST detrending method')
            if time is None:
                fn = f'{path_prace}/SST/SST_beta_{tavg}_all_{run}.nc'
            else:
                fn = f'{path_prace}/SST/SST_beta_{tavg}_{detrend_signal}_{run}_{first_year}_{last_year}.nc'

            try:
                assert os.path.exists(fn)
                print('reusing previously calculated beta!')
                print(f'file exists: {fn}')
                beta = xr.open_dataset(fn).slope
            except AssertionError:
                if run == 'ctrl': SST = SST[40:, :, :]
                beta = ADA().lag_linregress(forced_signal, SST)['slope']
                if run == 'had':
                    beta = xr.where(abs(beta) < 5, beta, np.median(beta))
                ds = xr.merge([forced_signal, beta])
                ds.to_netcdf(fn)

            SST_dt = SST - beta * forced_signal
            SST_dt = SST_dt - SST_dt.mean(dim='time')

            # output name
            if run == 'had':
                dt = 'sfdt'  # single factor detrending
            elif run in ['ctrl', 'lpd', 'rcp']:
                dt = 'sqdt'  # scaled quadratic detrending
            else:
                dt = 'sldt'  # scaled linear detrending

        elif detrend_signal in ['AMO', 'SOM', 'TPI1', 'TPI2', 'TPI3']:
            print('Steinman et al. (2015) method')
            # these indices will be detrended afterwards
            SST_dt = SST - forced_signal
            ds = None
            dt = f'{detrend_signal}dt'

        print('writing output')
        if time is None:
            fn = f'{path_prace}/SST/SST_{tavg}_{dt}_{run}.nc'
        else:
            fn = f'{path_prace}/SST/SST_{tavg}_{dt}_{run}_{first_year}_{last_year}.nc'
        SST_dt.to_netcdf(fn)
        print(f'detrended {run} SST file written out to:\n{fn}')

        # additional two factor detrending for had
        if run == 'had' and tavg == 'yrly':
            self.two_factor_detrending(SST)

        return
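At its core, steps 3-5 are an ordinary least-squares regression of the forced signal onto each grid point followed by subtracting beta times that signal. A minimal sketch with synthetic data (lag_linregress belongs to this codebase; xr.cov stands in for it here):

    import numpy as np
    import xarray as xr

    time = np.arange(200)
    forced = xr.DataArray(0.01 * time, dims='time')  # stand-in for the scaled GMST MMEM
    sst = xr.DataArray(np.random.rand(200, 3, 4), dims=['time', 'lat', 'lon']) + forced

    # OLS slope per grid point: beta = cov(SST, forced) / var(forced)
    beta = xr.cov(sst, forced, dim='time', ddof=0) / forced.var(dim='time')
    sst_dt = sst - beta * forced              # remove the forced signal
    sst_dt = sst_dt - sst_dt.mean(dim='time')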
Example #9
def OHC_parallel(run, mask_nr=0):
    """ ocean heat content calculation """
    print('***************************************************')
    print('* should be run with a dask scheduler             *')
    print('`from dask.distributed import Client, LocalCluster`')
    print('`cluster = LocalCluster(n_workers=2)`')
    print('`client = Client(cluster)`')
    print('***************************************************')

    print(
        f'\n{datetime.datetime.now()}  start OHC calculation: run={run} mask_nr={mask_nr}'
    )
    assert run in ['ctrl', 'rcp', 'lpd', 'lpi']
    assert isinstance(mask_nr, int)
    assert 0 <= mask_nr < 13

    file_out = f'{path_samoc}/OHC/OHC_integrals_{regions_dict[mask_nr]}_{run}.nc'

    if run in ['ctrl', 'rcp']:
        domain = 'ocn'
    elif run in ['lpd', 'lpi']:
        domain = 'ocn_low'

    MASK = boolean_mask(domain, mask_nr)

    # geometry
    DZT = xr_DZ(domain)
    AREA = xr_AREA(domain)
    HTN = xr_HTN(domain)
    LATS = xr_LATS(domain)
    print(f'{datetime.datetime.now()}  done with geometry')

    # multi-file
    file_list = ncfile_list(domain=domain, run=run, tavg='yrly', name='TEMP_PD')
    OHC = xr.open_mfdataset(paths=file_list,
                            concat_dim='time',
                            decode_times=False,
                            compat='minimal',
                            parallel=True).drop(['ULAT', 'ULONG']).TEMP * cp_sw * rho_sw
    if mask_nr != 0:
        OHC = OHC.where(MASK)
    print(f'{datetime.datetime.now()}  done loading data')

    for ds in [OHC, HTN, LATS]:
        round_tlatlon(ds)
    OHC_DZT = OHC * DZT
    print(f'{datetime.datetime.now()}  done OHC_DZT')

    # xr DataArrays
    da_g = xr_int_global(da=OHC, AREA=AREA, DZ=DZT)
    da_gl = xr_int_global_level(da=OHC, AREA=AREA, DZ=DZT)
    da_v = OHC_DZT.sum(dim='z_t')  # full-depth vertical integral
    da_va = OHC_DZT.isel(z_t=slice(0, 9)).sum(dim='z_t')  # above 100 m
    da_vb = OHC_DZT.isel(z_t=slice(9, 42)).sum(dim='z_t')  # below 100 m
    da_z = xr_int_zonal(da=OHC, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
    da_zl = xr_int_zonal_level(da=OHC, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
    print(f'{datetime.datetime.now()}  done calculations')

    # xr Datasets
    ds_g = da_g.to_dataset(name='OHC_global')
    ds_gl = da_gl.to_dataset(name='OHC_global_levels')
    ds_v = da_v.to_dataset(name='OHC_vertical')
    ds_va = da_va.to_dataset(name='OHC_vertical_above_100m')
    ds_vb = da_vb.to_dataset(name='OHC_vertical_below_100m')
    ds_z = da_z.to_dataset(name='OHC_zonal')
    ds_zl = da_zl.to_dataset(name='OHC_zonal_levels')
    print(f'{datetime.datetime.now()}  done dataset')

    print(f'output: {file_out}')

    ds_new = xr.merge([ds_g, ds_gl, ds_z, ds_zl, ds_v, ds_va, ds_vb])
    ds_new.to_netcdf(path=file_out, mode='w')
    print(f'{datetime.datetime.now()}  done\n')

    return ds_new
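The banner printed at the top spells out the intended dask setup; putting it together (a sketch):

    from dask.distributed import Client, LocalCluster

    cluster = LocalCluster(n_workers=2)
    client = Client(cluster)
    ds = OHC_parallel(run='lpd', mask_nr=0)  # open_mfdataset(parallel=True) now uses the cluster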
Example #10
import sys
import datetime
sys.path.append("..")
import xarray as xr
from paths import path_prace
from regions import boolean_mask, Atlantic_mask
from timeseries import IterateOutputCESM
from xr_DataArrays import xr_AREA


for j, run in enumerate(['ctrl','lc1']):
    domain = ['ocn', 'ocn_low'][j]
    TAREA = xr_AREA(domain=domain)
    mask_A = Atlantic_mask(domain=domain)
    mask_P = boolean_mask(domain=domain, mask_nr=2)
    mask_S = boolean_mask(domain=domain, mask_nr=1)
    shf, shf_A, shf_P, shf_S = [], [], [], []
    for i, (y,m,f) in enumerate(IterateOutputCESM(domain=domain, run=run, tavg='monthly')):
        da = xr.open_dataset(f, decode_times=False).SHF*TAREA  # [W/m^2]*[m^2] -> [W]
        shf.append(da.sum())
        shf_A.append(da.where(mask_A).sum())
        shf_P.append(da.where(mask_P).sum())
        shf_S.append(da.where(mask_S).sum())
    shf = xr.concat(shf, dim='time')
    shf_A = xr.concat(shf_A, dim='time')
    shf_P = xr.concat(shf_P, dim='time')
    shf_S = xr.concat(shf_S, dim='time')
    shf.name = 'Global_Ocean'
    shf_A.name = 'Atlantic_Ocean'
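The snippet is cut off before the remaining series are named and written out; a plausible completion, assuming the basin names follow the regions_dict convention used in the other examples (path and file name are hypothetical):

    # hypothetical completion of the truncated script
    shf_P.name = 'Pacific_Ocean'
    shf_S.name = 'Southern_Ocean'
    ds = xr.merge([shf, shf_A, shf_P, shf_S])
    ds.to_netcdf(f'{path_prace}/OHC/SHF_monthly_{run}.nc')  # assumed output path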
Example #11
import os
import sys
import matplotlib
matplotlib.use('Agg')  # headless backend; must be set before importing pyplot
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
import cartopy.crs as ccrs

sys.path.append("..")
from paths import path_data, path_results
from paths import file_ex_ocn_ctrl, file_ex_ocn_lpd
from paths import file_RMASK_ocn, file_RMASK_ocn_rect
from regions import AMO_mask, boolean_mask

start = int(sys.argv[1])
end   = int(sys.argv[2])

ctrl_daily = xr.open_mfdataset('/projects/0/prace_imau/prace_2013081679/cesm1_0_4/spinup_pd_maxcores_f05_t12/OUTPUT/ocn/hist/daily/spinup_pd_maxcores_f05_t12.pop.h.nday1.0300*.nc', combine='nested', concat_dim='time').SST

MASK = boolean_mask(domain='ocn', mask_nr=0) + boolean_mask(domain='ocn', mask_nr=-13) + boolean_mask(domain='ocn', mask_nr=-14)

cmap = plt.cm.Spectral_r
cmap.set_under('w')
daterange = pd.date_range('2018-01-01', '2018-12-31')
for i in range(365):
    fn =f'{path_results}/NAC_presentation/NA_daily_SST/NA_daily_SST_ctrl_black_{i:03d}.png'
    if i not in np.arange(start,end):  continue
    if os.path.exists(fn):  continue
    
    fig = plt.figure(figsize=(7, 7))
    fig.patch.set_facecolor('k')
    ax = plt.axes(projection=ccrs.NearsidePerspective(central_longitude=-40.0, central_latitude=40.0))
Example #12
def regr_map(ds, index, run, fn=None):
    """ map of regression slope with 95% significance countours and SST index polygons """
    if run in ['ctrl', 'rcp']: domain = 'ocn'
    elif run in ['lpd', 'lpi']: domain = 'ocn_low'
    elif run == 'had': domain = 'ocn_had'
    MASK = boolean_mask(domain=domain, mask_nr=0)

    xa = ds.slope.where(MASK)
    if domain in ['ocn', 'ocn_low']:
        xa = xa.assign_coords(TLONG=ds.TLONG)

    if index in ['AMO', 'SOM']:
        rects = rect_polygon(SST_index_bounds(index))
        clon = 300
        nv = .4
    elif index in ['PDO', 'IPO']:
        rects = rect_polygon(SST_index_bounds(index))
        clon = 200
        nv = .4
    elif index == 'TPI':
        rects = [
            rect_polygon(SST_index_bounds('TPI1')),
            rect_polygon(SST_index_bounds('TPI2')),
            rect_polygon(SST_index_bounds('TPI3')),
        ]
        clon = 200
        nv = .3
    else:
        raise ValueError(f'no polygons/colour limits defined for index {index}')

    # two-tailed significance as a boolean map: p < 0.005 or p > 0.995
    sig = ds.pval
    tail1 = np.where(sig < 0.005, 1, 0)
    tail2 = np.where(sig > 0.995, 1, 0)
    sig.values = tail1 + tail2

    proj = 'rob'
    cm = discrete_cmap(16, cmocean.cm.balance)
    label = 'regression slope [K/K]'
    text1 = f'SST({index})\nregr.'
    if run == 'had':
        text2 = f'{run.upper()}\n{ds.first_year+1870}-\n{ds.last_year+1870}'
    elif run in ['ctrl', 'lpd']:
        text2 = f'{run.upper()}\n{ds.first_year}-\n{ds.last_year}'
    if run in ['ctrl', 'rcp']:
        domain = 'ocn_T'

    f, ax = make_map(xa=xa,
                     domain=domain,
                     proj=proj,
                     cmap=cm,
                     minv=-nv,
                     maxv=nv,
                     label=label,
                     filename=fn,
                     text1=text1,
                     text2=text2,
                     rects=rects,
                     sig=sig,
                     clon=clon)
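discrete_cmap is a project helper; a common recipe for such a function, assuming it simply bins a continuous colormap into N colors (a sketch, not necessarily this project's implementation):

    import numpy as np
    import matplotlib.colors as mcolors

    def discrete_cmap(N, base_cmap):
        """ ListedColormap with N evenly spaced colors drawn from base_cmap """
        return mcolors.ListedColormap(base_cmap(np.linspace(0, 1, N)))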