Example #1
    def derive_SST_avg_index(self, run, index, dsdt='ds_dt', time=None):
        """ generates all area avg indices from detrended SST data """
        assert run in ['ctrl', 'rcp', 'lpd', 'lpi', 'had']
        assert time is None or len(time)==2
        assert index in ['AMO', 'SOM', 'SMV', 'TPI']  # otherwise SST_index is never assigned
        if run=='had':                domain, dims, ts = 'ocn_had' , ('latitude', 'longitude'), ''
        elif run in ['ctrl', 'rcp']:  domain, dims, ts = 'ocn_rect', ('t_lat', 't_lon'), f'_{time[0]}_{time[1]}'
        elif run in ['lpd', 'lpi']:   domain, dims, ts = 'ocn_low' , ('nlat', 'nlon')  , f'_{time[0]}_{time[1]}'

        fn_monthly = f'{path_prace}/SST/SST_monthly_{dsdt}_{run}{ts}.nc'
        SST_monthly = xr.open_dataarray(fn_monthly, decode_times=False)
        
        if index in ['AMO', 'SOM', 'SMV']:
            blats, blons, mask_nr = self.bounding_lats_lons(index)
            MASK = mask_box_in_region(domain=domain, mask_nr=mask_nr, bounding_lats=blats, bounding_lons=blons)
            AREA = xr_AREA(domain=domain).where(MASK)
            SST_index = self.SST_area_average(xa_SST=SST_monthly, AREA=AREA, AREA_index=AREA.sum(), MASK=MASK, dims=dims)
            
        if index=='TPI':  # monthly data
            for i, TPI_i in enumerate(['TPI1', 'TPI2', 'TPI3']):
                blats, blons, mask_nr = self.bounding_lats_lons(TPI_i)
                MASK = mask_box_in_region(domain=domain, mask_nr=mask_nr, bounding_lats=blats, bounding_lons=blons)
                AREA = xr_AREA(domain=domain).where(MASK)
                TPI_ = self.SST_area_average(xa_SST=SST_monthly, AREA=AREA, AREA_index=AREA.sum(), MASK=MASK, dims=dims)
                TPI_.to_netcdf(f'{path_prace}/SST/{TPI_i}_{dsdt}_raw_{run}{ts}.nc')
                if i==0:    SST_index = -0.5*TPI_
                elif i==1:  SST_index = SST_index + TPI_
                elif i==2:  SST_index = SST_index - 0.5*TPI_
        SST_index.to_netcdf(f'{path_prace}/SST/{index}_{dsdt}_raw_{run}{ts}.nc')
            
        return SST_index
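Note: `SST_area_average` is project code and not shown here. As a point of reference, a minimal self-contained sketch of the masked, area-weighted averaging it performs (synthetic data; the names are illustrative, not the project's API):

import numpy as np
import xarray as xr

# synthetic monthly SST field and cell areas on a small lat/lon grid
sst  = xr.DataArray(np.random.rand(24, 3, 4), dims=('time', 'latitude', 'longitude'))
area = xr.DataArray(np.random.rand(3, 4) + 1., dims=('latitude', 'longitude'))
mask = area > 1.5  # stand-in for mask_box_in_region output

w = area.where(mask)
sst_index = (sst * w).sum(('latitude', 'longitude')) / w.sum()

# the TPI then combines three such box averages as TPI2 - 0.5*(TPI1 + TPI3),
# which is what the -0.5/+1/-0.5 accumulation in the loop above computes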
Example #2
    def spatial_correlation(self,
                            field_A,
                            field_B,
                            method=None,
                            selection=None):
        """ correlate two 2D fields """
        if np.shape(field_A) != np.shape(field_B):  # have to regrid
            A, B = self.regrid_to_lower_resolution(field_A, field_B)
        else:
            A, B = field_A, field_B
        assert np.shape(A) == np.shape(B)
        domain = self.determine_domain(A)

        AREA = xr_AREA(domain)
        MASK = boolean_mask(domain=domain, mask_nr=0)
        if isinstance(selection, int):
            MASK = boolean_mask(domain=domain, mask_nr=selection)
        elif isinstance(selection, dict):
            MASK, AREA = MASK.sel(selection), AREA.sel(selection)
            A, B = A.sel(selection), B.sel(selection)

        D = np.any(np.array([np.isnan(A).values,
                             np.isnan(B).values,
                             (MASK == 0).values]), axis=0)
        A = xr.where(D, np.nan, A).stack(z=('latitude', 'longitude')).dropna(dim='z')
        B = xr.where(D, np.nan, B).stack(z=('latitude', 'longitude')).dropna(dim='z')
        C = xr.where(D, np.nan, AREA).stack(z=('latitude', 'longitude')).dropna(dim='z')
        d = DescrStatsW(np.array([A.values, B.values]).T, weights=C)
        spatial_corr_coef = d.corrcoef[0, 1]

        return spatial_corr_coef
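The weighted correlation at the core of this method comes from `statsmodels`; a self-contained illustration on synthetic flattened fields:

import numpy as np
from statsmodels.stats.weightstats import DescrStatsW

rng = np.random.default_rng(0)
a = rng.normal(size=500)             # flattened field A
b = a + 0.5 * rng.normal(size=500)   # flattened field B, correlated with A
w = rng.uniform(0.5, 1.5, size=500)  # grid-cell areas as weights

# weighted correlation matrix; the off-diagonal entry is the spatial correlation
r = DescrStatsW(np.column_stack([a, b]), weights=w).corrcoef[0, 1]
print(f'weighted spatial correlation: {r:.3f}')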
Example #3
def make_SALT_vol_integrals(run):
    """ calculate SALT volume integrals for specific regions from depth integrated SALT*DZT maps
    [g->kg] * (SALT*DZT).sum('z_t') * rho_w * \int \int dx dy
    1kg/1000g * g_S/kg_W*m * 1000kg_W/1m^3 * m^2 = kg

    output:
    netcdf with timeseries and trends for all depth levels
    """
    # Atlantic + Labrador + GIN, no Med
    dd, ddd = {}, []
    if run in ['ctrl', 'rcp']: AREA = xr_AREA('ocn')
    elif run in ['lpd', 'lr1']: AREA = xr_AREA('ocn_low')
    dm = [
        xr.open_dataarray(f'{path_prace}/SALT/SALT_dz_0-100m_{run}.nc'),
        xr.open_dataarray(f'{path_prace}/SALT/SALT_dz_0-1000m_{run}.nc'),
        xr.open_dataarray(f'{path_prace}/SALT/SALT_dz_below_1000m_{run}.nc')
    ]
    dt = [
        xr.open_dataarray(f'{path_prace}/SALT/SALT_dz_0-100m_trend_{run}.nc'),
        xr.open_dataarray(f'{path_prace}/SALT/SALT_dz_0-1000m_trend_{run}.nc'),
        xr.open_dataarray(
            f'{path_prace}/SALT/SALT_dz_below_1000m_trend_{run}.nc')
    ]
    for j, (latS, latN) in enumerate(lat_bands):
        MASK = make_Mask(run, latS, latN)
        for d, depth in notebook.tqdm(enumerate(['0-100m', '0-1000m', 'below_1000m'])):
            dm_, dt_ = dm[d], dt[d]
            tseries = (dm_ * AREA).where(MASK).sum(dim=['nlat', 'nlon'])  # g/kg*m*m^2=kg
            trend = (dt_ * AREA).where(MASK).sum(dim=['nlat', 'nlon'])
            tseries.name = f'SALT_{depth}_timeseries_{latS}N_{latN}N'
            trend.name = f'SALT_{depth}_trend_{latS}N_{latN}N'
            dd[f'SALT_{depth}_timeseries_{latS}N_{latN}N'] = tseries
            dd[f'SALT_{depth}_trend_{latS}N_{latN}N'] = trend
            ddd.append(tseries)
            ddd.append(trend)

    xr.merge(ddd).to_netcdf(f'{path_results}/SALT/SALT_integrals_{run}.nc')
    return
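The reduction inside the loop is just a masked, area-weighted sum; a toy version with synthetic data (dimension names follow the POP convention used above):

import numpy as np
import xarray as xr

salt_dz = xr.DataArray(np.random.rand(10, 20), dims=('nlat', 'nlon'))  # (SALT*DZT).sum('z_t') [g/kg * m]
area    = xr.DataArray(np.full((10, 20), 1e9), dims=('nlat', 'nlon'))  # cell areas [m^2]
mask    = xr.DataArray(np.random.rand(10, 20) > 0.5, dims=('nlat', 'nlon'))

# g/kg * m * m^2 * (1 kg / 1000 g) * (1000 kg_W / m^3) = kg of salt
salt_kg = (salt_dz * area).where(mask).sum(dim=['nlat', 'nlon'])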
Example #4
    def generate_yrly_global_mean_SST(self, run):
        """ calcaultes the global mean sea surface temperature
        ca. 37 sec for ctrl """
        assert run in ['ctrl', 'lpd']

        da = xr.open_dataarray(f'{path_prace}/SST/SST_yrly_{run}.nc',
                               decode_times=False)
        if run == 'ctrl':
            AREA = xr_AREA(domain='ocn')
            REGION_MASK = xr.open_dataset(file_ex_ocn_ctrl,
                                          decode_times=False).REGION_MASK
        elif run == 'lpd':
            AREA = xr_AREA(domain='ocn_low')
            REGION_MASK = xr.open_dataset(file_ex_ocn_lpd,
                                          decode_times=False).REGION_MASK
        AREA_total = AREA.where(REGION_MASK > 0).sum(dim=['nlat', 'nlon'],
                                                     skipna=True)
        da_new = (da * AREA).where(REGION_MASK > 0).sum(
            dim=['nlat', 'nlon'], skipna=True) / AREA_total
        da_new.to_netcdf(f'{path_prace}/SST/GMSST_yrly_{run}.nc')
        return
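The explicit sum-and-divide above can also be written with xarray's `weighted` accessor; a sketch on synthetic data (equivalence assumes the same mask is applied to data and weights):

import numpy as np
import xarray as xr

sst  = xr.DataArray(np.random.rand(5, 10, 20), dims=('time', 'nlat', 'nlon'))
area = xr.DataArray(np.random.rand(10, 20), dims=('nlat', 'nlon'))
region_mask = xr.DataArray(np.random.randint(-1, 3, (10, 20)), dims=('nlat', 'nlon'))

# weighted mean over ocean points only; matches (da*AREA).sum()/AREA_total
gmsst = sst.where(region_mask > 0).weighted(area.where(region_mask > 0, 0)).mean(dim=['nlat', 'nlon'])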
Example #5
def CESM_xlfca(run, basin, dsdt, test=False):
    """ performing the LFCA via xlfca of our model output """
    print(run, basin, dsdt)
    # performance had:  North Pacific 20 N, time steps 10: 5.47 s, 100: 27.8 s, 1788 (all): 477s  -> 142 MB
    if run == 'had': dt, domain = '', 'ocn_had'
    elif run == 'ctrl': dt, domain = '_51_301', 'ocn_rect'
    elif run == 'lpd': dt, domain = '_154_404', 'ocn_low'

    if basin == 'North_Pacific':
        mask_nr, bounding_lats, bounding_lons = 2, (20, 68), (110, 255)
    elif basin == 'full_Pacific':
        mask_nr, bounding_lats, bounding_lons = 2, (-38, 68), (110, 290)
    elif basin == 'Southern_Ocean':
        mask_nr, bounding_lats, bounding_lons = 1, None, None
    elif basin == 'North_Atlantic':
        mask_nr, bounding_lats, bounding_lons = 6, (0, 60), (-80, 0)
    fn = f'{path_prace}/SST/SST_monthly_{dsdt}_{run}{dt}.nc'

    MASK = mask_box_in_region(domain=domain,
                              mask_nr=mask_nr,
                              bounding_lats=bounding_lats,
                              bounding_lons=bounding_lons)
    AREA = xr_AREA(domain=domain).where(MASK)
    SST = xr.open_dataarray(fn, decode_times=False).where(MASK)
    if basin in ['North_Pacific', 'full_Pacific'] and run == 'had':  # shifting
        AREA = DS().shift_had(AREA)
        SST = DS().shift_had(SST)
    if basin == 'North_Atlantic' and run == 'ctrl':
        AREA = DS().shift_ocn_rect(AREA)
        SST = DS().shift_ocn_rect(SST)
    if basin == 'North_Atlantic' and run == 'lpd':
        AREA = DS().shift_ocn_low(AREA)
        SST = DS().shift_ocn_low(SST)
    AREA = AREA.where(SST[0, :, :].notnull(), drop=True)
    SST = SST.where(SST[0, :, :].notnull(), drop=True)
    scale = AREA / AREA.sum()
    scale = xr.apply_ufunc(np.sqrt, scale)
    lfca = None  # remains None if all output files exist already
    for n_EOFs in [3, 30]:
        fn_lfca = f'{path_prace}/LFCA/LFCA_{run}_{basin}_{dsdt}_n{n_EOFs}.nc'
        if os.path.exists(fn_lfca): continue
        if test:
            lfca = xlfca(x=SST.isel(time=slice(0, 40)),
                         cutoff=120,
                         truncation=n_EOFs,
                         scale=scale)
        else:
            lfca = xlfca(x=SST, cutoff=120, truncation=n_EOFs, scale=scale)
            lfca.to_netcdf(fn_lfca)

    return lfca
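`xr.apply_ufunc(np.sqrt, ...)` is equivalent to applying `np.sqrt` directly to the DataArray; the scale weights are the square roots of the normalized cell areas, so their squares sum to 1:

import numpy as np
import xarray as xr

area = xr.DataArray(np.random.rand(10, 20) + 0.1, dims=('lat', 'lon'))
scale = np.sqrt(area / area.sum())  # same as xr.apply_ufunc(np.sqrt, area / area.sum())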
Example #6
def SST_index_from_monthly(run, index_loc, MASK):
    """ loads monthly SST data, calculated SST_index, returns raw timeseries"""
    assert run in ['ctrl', 'rcp', 'lpd', 'lpi']
    if run in ['ctrl', 'rcp']:
        domain = 'ocn'
    elif run in ['lpd', 'lpi']:
        domain = 'ocn_low'
    AREA = xr_AREA(domain)
    AREA_index = AREA.where(MASK).sum()

    SSTi_list = []
    for y, m, s in IterateOutputCESM(domain=domain, run=run, tavg='monthly'):
        if m == 1: print(y)
        xa_SST = xr.open_dataset(s, decode_times=False).TEMP[0, 0, :, :]
        SSTi_list.append(SST_index(xa_SST, AREA, index_loc, AREA_index, MASK))

    # a single concat avoids the quadratic cost of concatenating inside the loop
    return xr.concat(SSTi_list, dim='time')
Example #7
 def surface_heat_flux(self, run):
     """ total surface heat flux into ocean basins """
     # 32:20 min ctrl
     # 1min 4s lpd
     if run == 'ctrl': domain = 'ocn'
     elif run in ['lc1', 'lpd']: domain = 'ocn_low'
     da = xr.open_mfdataset(f'{path_prace}/{run}/ocn_yrly_SHF_0*.nc',
                            combine='nested',
                            concat_dim='time').SHF
     print(len(da.time))
     AREA = xr_AREA(domain=domain)
     SHF = spy * (da * AREA).sum(dim=['nlat', 'nlon'])
     SHF.name = 'Global_Ocean'
     for nr in tqdm(np.arange(1, 12)):
         MASK = boolean_mask(domain=domain, mask_nr=nr)
         temp = spy * (da * AREA).where(MASK).sum(dim=['nlat', 'nlon'])
         temp.name = regions_dict[nr]
         SHF = xr.merge([SHF, temp])
     SHF.attrs['quantity'] = 'yrly averaged total surface heat flux, positive down'
     SHF.to_netcdf(f'{path_prace}/OHC/SHF_{run}.nc')
     return
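`spy` (seconds per year, defined elsewhere in the project) converts the W = J/s area integral to J/yr; a minimal self-contained sketch of the reduction:

import numpy as np
import xarray as xr

spy  = 365 * 24 * 3600  # seconds per year (assumed value, 365-day model year)
shf  = xr.DataArray(np.random.rand(5, 10, 20), dims=('time', 'nlat', 'nlon'))  # [W/m^2]
area = xr.DataArray(np.full((10, 20), 1e9), dims=('nlat', 'nlon'))             # [m^2]

# W/m^2 * m^2 summed over the grid -> W; times s/yr -> J/yr
shf_global = spy * (shf * area).sum(dim=['nlat', 'nlon'])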
Example #8
    def all_transports(self, run, quantity):
        """ computes heat or salt fluxes """
        assert run in ['ctrl', 'lpd']
        assert quantity in ['SALT', 'OHC']

        if quantity == 'OHC':
            VN, UE = 'VNT', 'UET'
            conversion = rho_sw * cp_sw
            qstr = 'heat'
            unit_out = 'W'
        elif quantity == 'SALT':
            VN, UE = 'VNS', 'UES'
            conversion = rho_sw * 1e-3
            qstr = 'salt'
            unit_out = 'kg/s'

        if run == 'ctrl':
            domain = 'ocn'
            all_transports_list = []

        elif run == 'lpd':
            domain = 'ocn_low'
            mf_fn = f'{path_prace}/{run}/ocn_yrly_{VN}_{UE}_*.nc'
            kwargs = {
                'concat_dim': 'time',
                'decode_times': False,
                'drop_variables': ['TLONG', 'TLAT', 'ULONG', 'ULAT'],
                'parallel': True
            }
            ds = xr.open_mfdataset(mf_fn, **kwargs)

        DZ = xr_DZ(domain=domain)
        adv = self.all_advection_cells(domain=domain)
        AREA = xr_AREA(domain=domain)
        dims = [dim for dim in dll_dims_names(domain=domain)]

        for i, pair in enumerate(tqdm(neighbours)):
            name = f'{qstr}_flux_{regions_dict[pair[0]]}_to_{regions_dict[pair[1]]}'
            adv_E = adv[
                f'adv_E_{regions_dict[pair[0]]}_to_{regions_dict[pair[1]]}']
            adv_N = adv[
                f'adv_N_{regions_dict[pair[0]]}_to_{regions_dict[pair[1]]}']
            MASK = xr.where(abs(adv_E) + abs(adv_N) > 0, 1, np.nan)  # 1 on the section, NaN elsewhere
            adv_E = adv_E.where(MASK == 1, drop=True)
            adv_N = adv_N.where(MASK == 1, drop=True)
            DZ_ = DZ.where(MASK == 1, drop=True)
            AREA_ = AREA.where(MASK == 1, drop=True)
            if run == 'ctrl':
                for j, (y, m, f) in tqdm(enumerate(IterateOutputCESM(
                        domain='ocn', run='ctrl', tavg='yrly', name=f'{VN}_{UE}'))):
                    ds = xr.open_dataset(f, decode_times=False).where(MASK == 1, drop=True)
                    transport = ((adv_E * ds[UE] + adv_N * ds[VN]) * AREA_ *
                                 DZ_).sum(dim=dims) * conversion
                    transport.name = name
                    transport.attrs['units'] = unit_out
                    if j == 0: transport_t = transport
                    else: transport_t = xr.concat([transport_t, transport], dim='time')

                all_transports_list.append(transport_t)

            elif run == 'lpd':
                ds_ = ds.where(MASK == 1, drop=True)
                transport = ((adv_E * ds_[UE] + adv_N * ds_[VN]) * AREA_ *
                             DZ_).sum(dim=dims) * conversion

                transport.name = name
                transport.attrs['units'] = unit_out
                if i == 0: all_transports = transport
                else: all_transports = xr.merge([all_transports, transport])

        if run == 'ctrl': all_transports = xr.merge(all_transports_list)

        all_transports.to_netcdf(
            f'{path_prace}/{quantity}/{quantity}_fluxes_{run}.nc')
        return all_transports
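The boundary mask above is 1 wherever an advection cell is active and NaN elsewhere; a toy demonstration of the construction (synthetic arrays, hypothetical values):

import numpy as np
import xarray as xr

adv_E = xr.DataArray(np.array([[0., 1.], [0., -1.]]), dims=('nlat', 'nlon'))
adv_N = xr.DataArray(np.array([[0., 0.], [1., 0.]]), dims=('nlat', 'nlon'))

# 1 where flux crosses the section, NaN elsewhere, so .where(MASK == 1, drop=True)
# trims all fields to the bounding box of the section
MASK = xr.where(abs(adv_E) + abs(adv_N) > 0, 1, np.nan)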
Example #9
    def generate_OHC_files(self, run, year=None, pwqd=False):
        """ non-detrended OHC files for full length of simulations
        
        One file contains integrals (all global and by basin):
        
        x,y,z .. scalars        
        x,y   .. vertical profiles 
        x     .. "zonal" integrals 
        
        A separate file each for 4 different depth levels
        z     .. 2D maps, global only, but for different vertical levels
 
        # (ocn:      takes about 45 seconds per year: 70 yrs approx 55 mins)
        (ocn:      takes about 14 min per year)
        (ocn_rect: takes about  3 seconds per year: 70 yrs approx 3 mins)
        """
        
        def t2da(da, t):
            """adds time dimension to xr DataArray, then sets time value to t"""
            da = da.expand_dims('time')
            da = da.assign_coords(time=[t])
            return da

        def t2ds(da, name, t):
            """ 
            adds time dimension to xr DataArray, then sets time value to t,
            and then returns as array in xr dataset
            """
            da = t2da(da, t)
            ds = da.to_dataset(name=name)
            return ds
        start = datetime.datetime.now()
        def tss():  # time since start
            return datetime.datetime.now()-start
        print(f'{start}  start OHC calculation: run={run}')
        assert run in ['ctrl', 'rcp', 'lpd', 'lpi']

        if run=='rcp':
            domain = 'ocn'
        elif run=='ctrl':
            domain = 'ocn_rect'
        elif run in ['lpd', 'lpi']:
            domain = 'ocn_low'
            
        (z, lat, lon) = dll_dims_names(domain)

        # geometry
        DZT  = xr_DZ(domain)
        AREA = xr_AREA(domain)
        HTN  = xr_HTN(domain)
        LATS = xr_LATS(domain)
        
        def round_tlatlon(das):
            """ rounds TLAT and TLONG to 2 decimals
            some files' coordinates differ in their last digit
            rounding them avoids problems in concatenating
            """
            das['TLAT']   = das['TLAT'].round(decimals=2)
            das['TLONG']  = das['TLONG'].round(decimals=2)
            return das
        if domain=='ocn':
            round_tlatlon(HTN)
            round_tlatlon(LATS)

        MASK = boolean_mask(domain, mask_nr=0)
        DZT  = DZT.where(MASK)
        # with chunking for ctrl_rect: 21 sec per iteration, 15 sec without
        AREA = AREA.where(MASK)
        HTN  = HTN.where(MASK)
        LATS = LATS.where(MASK)
        
        if pwqd:  name = 'TEMP_pwqd'
        else:     name = 'TEMP_PD'
            
        
        for y,m,file in IterateOutputCESM(domain=domain, run=run, tavg='yrly', name=name):
            if year is not None and y != year:  # only process the selected year
                continue
                    
            if pwqd:  file_out = f'{path_samoc}/OHC/OHC_integrals_{run}_{y:04d}_pwqd.nc'
            else:     file_out = f'{path_samoc}/OHC/OHC_integrals_{run}_{y:04d}.nc'

            if os.path.exists(file_out) and year is None:
                # to do: check whether all the fields already exist in the file
                continue
            print(f'{tss()} {y}, {file}')

            t   = y*365  # time in days since year 0, for consistency with CESM date output
            ds  = xr.open_dataset(file, decode_times=False).TEMP
            print(f'{tss()} opened dataset')
            if domain=='ocn':
                ds = ds.drop(['ULONG', 'ULAT'])
                ds = round_tlatlon(ds)

            OHC = ds*rho_sw*cp_sw
            ds.close()
            OHC = OHC.where(MASK)

            OHC_DZT = OHC*DZT
            print(f'{tss()}  {y} calculated OHC & OHC_DZT')
            
            # global, global levels, zonal, zonal levels integrals for different regions
            for mask_nr in tqdm([0,1,2,3,6,7,8,9,10]):
                name = regions_dict[mask_nr]
                da = OHC.where(boolean_mask(domain, mask_nr=mask_nr))
                
                da_g = (da*AREA*DZT).sum(dim=[z, lat, lon])
                da_g.attrs['units'] = '[J]'
                ds_g  = t2ds(da_g , f'OHC_{name}', t)

                da_gl = (da*AREA).sum(dim=[lat, lon])
                da_gl.attrs['units'] = '[J m^-1]'
                ds_gl = t2ds(da_gl, f'OHC_levels_{name}', t)

                if domain=='ocn':  da_z  = xr_int_zonal(da=da, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
                else:  da_z = (da*HTN*DZT).sum(dim=[z, lon])
                da_z.attrs['units'] = '[J m^-1]'
                ds_z = t2ds(da_z , f'OHC_zonal_{name}', t)
                
                if domain=='ocn':  da_zl = xr_int_zonal_level(da=da, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
                else:  da_zl = (da*HTN).sum(dim=[lon])
                da_zl.attrs['units'] = '[J m^-2]'
                ds_zl = t2ds(da_zl, f'OHC_zonal_levels_{name}', t)
                if mask_nr==0:   ds_new = xr.merge([ds_g, ds_gl, ds_z, ds_zl])
                else:            ds_new = xr.merge([ds_new, ds_g, ds_gl, ds_z, ds_zl])
                    
            print(f'{tss()}  done with horizontal calculations')
            
            # vertical integrals
            # full depth
            da_v  = OHC_DZT.sum(dim=z)                         #   0-6000 m
            da_v.attrs = {'depths':f'{OHC_DZT[z][0].values:.0f}-{OHC_DZT[z][-1].values:.0f}',
                          'units':'[J m^-2]'}
            
            if domain in ['ocn', 'ocn_rect']:  zsel = [[0,9], [0,20], [20,26]]
            elif domain=='ocn_low':            zsel = [[0,9], [0,36], [36,45]]
            
            #   0- 100 m
            da_va = OHC_DZT.isel({z:slice(zsel[0][0], zsel[0][1])}).sum(dim=z)  
            da_va.attrs = {'depths':f'{OHC_DZT[z][zsel[0][0]].values:.0f}-{OHC_DZT[z][zsel[0][1]].values:.0f}',
                           'units':'[J m^-2]'}
            
            #   0- 700 m
            da_vb = OHC_DZT.isel({z:slice(zsel[1][0],zsel[1][1])}).sum(dim=z)  
            da_vb.attrs = {'depths':f'{OHC_DZT[z][zsel[1][0]].values:.0f}-{OHC_DZT[z][zsel[1][1]].values:.0f}',
                           'units':'[J m^-2]'}
            
            # 700-2000 m
            da_vc = OHC_DZT.isel({z:slice(zsel[2][0],zsel[2][1])}).sum(dim=z)  
            da_vc.attrs = {'depths':f'{OHC_DZT[z][zsel[2][0]].values:.0f}-{OHC_DZT[z][zsel[2][1]].values:.0f}',
                           'units':'[J m^-2]'}
            
            ds_v  = t2ds(da_v , 'OHC_vertical_0_6000m'  , t)
            ds_va = t2ds(da_va, 'OHC_vertical_0_100m'   , t)
            ds_vb = t2ds(da_vb, 'OHC_vertical_0_700m'   , t)
            ds_vc = t2ds(da_vc, 'OHC_vertical_700_2000m', t)

            ds_new = xr.merge([ds_new, ds_v, ds_va, ds_vb, ds_vc])

            print(f'{tss()}  done making datasets')
            ds_new.to_netcdf(path=file_out, mode='w')
            ds_new.close()

        print(f'{datetime.datetime.now()}  done\n')
        # n.b. for ctrl, year 205 is wrong and should be averaged by executing `fix_ctrl_year_205()`
        return
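`t2da`/`t2ds` wrap a common xarray pattern for stamping a single-time snapshot; a standalone sketch:

import numpy as np
import xarray as xr

da = xr.DataArray(np.random.rand(10), dims=('lat',))
da = da.expand_dims('time').assign_coords(time=[2000 * 365])  # days since year 0
ds = da.to_dataset(name='OHC_Global_Ocean')  # hypothetical variable name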
Example #10
def GMST_timeseries(run):
    """ builds a timesries of the GMST and saves it to a netCDF
    
    input:
    run    .. (str) ctrl or cp
    
    output:
    ds_new .. xr Dataset containing GMST and T_zonal
    """
    domain = 'atm'
    tavg   = 'yrly'
    name   = 'T_T850_U_V'
    
    if run in ['ctrl', 'rcp', 'hq']:          AREA = xr_AREA('atm')
    elif run=='lpi':                          AREA = xr_AREA('atm_f19')
    elif run in ['lpd', 'lc1', 'lr1', 'lq']:  AREA = xr_AREA('atm_f09')

    AREA_lat   = AREA.sum(dim='lon')
    AREA_total = AREA.sum(dim=('lat','lon'))
        
        
    if run in ['lpd']:  name = None

    ny   = len(IterateOutputCESM(domain=domain, run=run, tavg=tavg, name=name))
    first_yr = IterateOutputCESM(domain=domain, run=run, tavg=tavg, name=name).year
    iterator = IterateOutputCESM(domain=domain, run=run, tavg=tavg, name=name)
    years    = (np.arange(ny) + first_yr)*365  # this is consistent with CESM output
    
    for i, (y, m, file) in enumerate(iterator):
        print(y)
        assert os.path.exists(file)
        if run in ['ctrl', 'rcp', 'lpi', 'hq']:
            da = xr.open_dataset(file, decode_times=False)['T'][-1,:,:]
        elif run in ['lpd', 'lr1', 'lq', 'ld']:
            da = xr.open_dataset(file, decode_times=False)['T'][0,-1,:,:]
        
        if i==0:  # create new xr Dataset
            lats = da.lat.values
            ds_new = xr.Dataset()
            ds_new['GMST']    = xr.DataArray(data=np.zeros((ny)),
                                             coords={'time': years},
                                             dims=('time'))
            ds_new['T_zonal'] = xr.DataArray(data=np.zeros((ny, len(lats))), 
                                             coords={'time': years, 'lat': lats},
                                             dims=('time', 'lat'))
            
        ds_new['GMST'][i]      = (da*AREA).sum(dim=('lat','lon'))/AREA_total
        ds_new['T_zonal'][i,:] = (da*AREA).sum(dim='lon')/AREA_lat
    
    # [K] to [degC]
    for field in ['GMST', 'T_zonal']:  
        ds_new[field] = ds_new[field] + abs_zero

    # rolling linear trends [degC/yr]
    ds_new = rolling_lin_trends(ds=ds_new, ny=ny, years=years)

    # fits
    lfit = np.polyfit(np.arange(ny), ds_new.GMST, 1)
    qfit = np.polyfit(np.arange(ny), ds_new.GMST, 2)
    
    ds_new['lin_fit']  = xr.DataArray(data=np.empty((len(ds_new['GMST']))),
                                      coords={'time': years},
                                      dims=('time'),
                                      attrs={'lin_fit_params':lfit})
    ds_new['quad_fit'] = xr.DataArray(data=np.empty((len(ds_new['GMST']))),
                                      coords={'time': years},
                                      dims=('time'),
                                      attrs={'quad_fit_params':qfit})

    for t in range(ny):
        ds_new['lin_fit'][t]  = np.polyval(lfit, t)
        ds_new['quad_fit'][t] = np.polyval(qfit, t)
        
    ds_new.to_netcdf(path=f'{path_prace}/GMST/GMST_{run}.nc', mode='w')
    
    return ds_new
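The fit bookkeeping follows the standard numpy pattern; a self-contained example on a synthetic GMST series (np.polyval evaluates the coefficient vectors returned by np.polyfit):

import numpy as np

ny = 100
t = np.arange(ny)
gmst = 0.01 * t + 1e-4 * t**2 + np.random.normal(0., 0.1, ny)  # synthetic series

lfit = np.polyfit(t, gmst, 1)  # [slope, intercept]
qfit = np.polyfit(t, gmst, 2)  # [a, b, c] of a*t^2 + b*t + c
lin_fit, quad_fit = np.polyval(lfit, t), np.polyval(qfit, t)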
Example #11
def SST_index(index, run, detrend_signal='GMST', time_slice='full'):
    """ calcalates SST time series from yearly detrended SST dataset """
    assert index in ['AMO', 'SOM', 'TPI1', 'TPI2', 'TPI3']
    assert run in ['ctrl', 'rcp', 'lpd', 'lpi', 'had']
    assert detrend_signal in ['GMST', 'AMO', 'SOM', 'TPI1', 'TPI2', 'TPI3']

    print(index, run)

    if run in ['ctrl', 'rcp']:
        domain = 'ocn'  #check this
        dims = ('nlat', 'nlon')
    elif run in ['lpd', 'lpi']:
        domain = 'ocn_low'
        dims = ('nlat', 'nlon')
    elif run == 'had':
        domain = 'ocn_had'
        dims = ('latitude', 'longitude')

    blats, blons, mask_nr = bounding_lats_lons(index)

    MASK = mask_box_in_region(domain=domain,
                              mask_nr=mask_nr,
                              bounding_lats=blats,
                              bounding_lons=blons)
    AREA = xr_AREA(domain=domain).where(MASK)
    index_area = AREA.sum()

    if (run == 'had' and detrend_signal in ['AMO', 'SOM']) or detrend_signal == 'GMST':
        print(
            f'underlying SST field: detrended with {detrend_signal}, no filtering'
        )
        if detrend_signal == 'GMST':
            print('GMST(t) signal scaled at each grid point\n')
        else:
            print(
                f'{detrend_signal}(t) removed from all SST gridpoints without scaling\n'
            )
        if time_slice == 'full':
            fn = f'{path_samoc}/SST/SST_{detrend_signal}_dt_yrly_{run}.nc'
            trange = ''
        else:
            first_year, last_year = determine_years_from_slice(
                run=run, tres='yrly', time_slice=time_slice)
            trange = f'_{first_year}_{last_year}'
            fn = f'{path_samoc}/SST/SST_{detrend_signal}_dt_yrly{trange}_{run}.nc'
        assert os.path.exists(fn)
        SST_yrly = xr.open_dataarray(fn).where(MASK)
        detr = f'_{detrend_signal}_dt'

    else:  # no detrending of the underlying SST field
        print('underlying SST field: no detrending, no filtering')
        if detrend_signal in ['AMO', 'SOM']:
            print(
                f'{detrend_signal} must subsequently be detrended with polynomial\n'
            )
        else:
            print(
                f'{detrend_signal} must not be detrended since forcing signal compensated in TPI\n'
            )
        SST_yrly = xr.open_dataarray(
            f'{path_samoc}/SST/SST_yrly_{run}.nc').where(MASK)
        detr, trange = '', ''  # trange must be defined for the filename below

    SSTindex = SST_area_average(xa_SST=SST_yrly,
                                AREA=AREA,
                                AREA_index=index_area,
                                MASK=MASK,
                                dims=dims)
    SSTindex.to_netcdf(f'{path_samoc}/SST/{index}{detr}_raw{trange}_{run}.nc')

    return SSTindex
Example #12
def PMV_EOF_indices(run, extent):
    """ perform EOF of monthly, deseasonalized SST (detrended with global mean SST)
    NB: we will use 60S to 60N SST data as polar data is limited in observations

    1. for detrending: compute monthly global mean SST time series, deseasonalize them
        >>> see `SST_data_generation.py` file for scripts
    2. North Pacific monthly output fields
        2.1. create monthly SST field
             (if appropriate: determine extend of grid, limit all coordinates)
             save as single file
             a) North of 38 deg S
             b) North of Equator
             b) North of 20 deg N
        2.2. deseasonalize 
        2.3. detrend global mean, deseasonalized SST, 
        2.4. (remove mean at each point)
    3. EOF analysis
       --> index is first principal component
    4. regress time series on global maps
        goal: perform EOF of monthly, deseasonalized SST (detrended with global mean SST)
    NB: we will use 60S to 60N SST data as polar data is limited in observations

    
    
    4. regress time series on global maps
        >>> in correspoinding .ipynb files
    """
    assert run in ['ctrl', 'rcp', 'lpd', 'lpi', 'had']
    assert extent in ['38S', 'Eq', '20N']

    if run in ['ctrl', 'rcp']:
        domain = 'ocn_rect'
        run_name = f'rect_{run}'
    elif run in ['lpd', 'lpi']:
        domain = 'ocn_low'
        run_name = run
    elif run == 'had':
        domain = 'ocn_had'
        run_name = run

    # load fields (generated in `SST_data_generation.py`)
    # <SST>(t):  60S-60N (tropical/extratropical) monthly timeseries
    SST_xm = xr.open_dataarray(
        f'{path_samoc}/SST/SST_60S_60N_mean_monthly_{run_name}.nc',
        decode_times=False)

    # SST(t,x,y): monthly SST field limited to Pacific North of `extent` (*)
    SST = xr.open_dataarray(f'{path_samoc}/SST/SST_monthly_{run_name}.nc',
                            decode_times=False)

    print('opened datasets')

    # deseasonalize
    SST_xm_ds = deseasonalize(SST_xm)
    SST_ds = deseasonalize(SST)

    print('deseasonalized datasets')

    # some of the time series are not the same length
    if run == 'ctrl': SST_xm_ds = SST_xm_ds[:-7]
    elif run == 'rcp': SST_xm_ds = SST_xm_ds[:-1]

    # detrend
    SST_ds_dt = SST_ds - SST_xm_ds

    print('detrended SSTs')

    # remove mean at each point
    SST_ds_dt_dm = SST_ds_dt - SST_ds_dt.mean('time')

    # EOF analysis
    # N.B. cut off two years on either end as the arbitrary start month biases the filtered time series
    if extent == '38S':
        latS, lonE = -38, 300
    elif extent == 'Eq':
        latS, lonE = 0, 285
    elif extent == '20N':
        latS, lonE = 20, 255

    AREA = xr_AREA(domain=domain)
    Pac_MASK = mask_box_in_region(domain=domain,
                                  mask_nr=2,
                                  bounding_lats=(latS, 68),
                                  bounding_lons=(110, lonE))
    Pac_area = AREA.where(Pac_MASK)
    fn = f'{path_samoc}/SST/SST_PDO_EOF_{extent}_{run}.nc'

    print('prepared EOF')

    eof, pc = EOF_SST_analysis(xa=SST_ds_dt_dm[24:-24, :, :].where(Pac_MASK),
                               weights=Pac_area,
                               fn=fn)

    print('performed EOF')

    return eof, pc
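The `deseasonalize` helper is project code and not shown here; a common xarray equivalent subtracts the monthly climatology (this sketch assumes a decoded datetime axis, which the `decode_times=False` data above would first need):

import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range('2000-01-01', periods=120, freq='MS')
sst = xr.DataArray(np.random.rand(120, 10, 20), dims=('time', 'lat', 'lon'),
                   coords={'time': time})

# subtract each calendar month's long-term mean
sst_ds = sst.groupby('time.month') - sst.groupby('time.month').mean('time')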
Example #13
File: OHC.py Project: AJueling/CESM
def OHC_parallel(run, mask_nr=0):
    """ ocean heat content calculation """
    print('***************************************************')
    print('* should be run with a dask scheduler             *')
    print('`from dask.distributed import Client, LocalCluster`')
    print('`cluster = LocalCluster(n_workers=2)`')
    print('`client = Client(cluster)`')
    print('***************************************************')

    print(
        f'\n{datetime.datetime.now()}  start OHC calculation: run={run} mask_nr={mask_nr}'
    )
    assert run in ['ctrl', 'rcp', 'lpd', 'lpi']
    assert type(mask_nr) == int
    assert mask_nr >= 0 and mask_nr < 13

    file_out = f'{path_samoc}/OHC/OHC_integrals_{regions_dict[mask_nr]}_{run}.nc'

    if run in ['ctrl', 'rcp']:
        domain = 'ocn'
    elif run in ['lpd', 'lpi']:
        domain = 'ocn_low'

    MASK = boolean_mask(domain, mask_nr)

    # geometry
    DZT = xr_DZ(domain)
    AREA = xr_AREA(domain)
    HTN = xr_HTN(domain)
    LATS = xr_LATS(domain)
    print(f'{datetime.datetime.now()}  done with geometry')

    # multi-file
    file_list = ncfile_list(domain='ocn', run=run, tavg='yrly', name='TEMP_PD')
    OHC = xr.open_mfdataset(paths=file_list,
                            concat_dim='time',
                            decode_times=False,
                            compat='minimal',
                            parallel=True).drop(['ULAT', 'ULONG']).TEMP * cp_sw * rho_sw
    if mask_nr != 0:
        OHC = OHC.where(MASK)
    print(f'{datetime.datetime.now()}  done loading data')

    for ds in [OHC, HTN, LATS]:
        round_tlatlon(ds)
    OHC_DZT = OHC * DZT
    print(f'{datetime.datetime.now()}  done OHC_DZT')

    # xr DataArrays
    da_g = xr_int_global(da=OHC, AREA=AREA, DZ=DZT)
    da_gl = xr_int_global_level(da=OHC, AREA=AREA, DZ=DZT)
    da_v = OHC_DZT.sum(dim='z_t')  # equivalent to xr_int_vertical(da=OHC, DZ=DZT)
    da_va = OHC_DZT.isel(z_t=slice(0, 9)).sum(dim='z_t')  # above 100 m
    da_vb = OHC_DZT.isel(z_t=slice(9, 42)).sum(dim='z_t')  # below 100 m
    da_z = xr_int_zonal(da=OHC, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
    da_zl = xr_int_zonal_level(da=OHC, HTN=HTN, LATS=LATS, AREA=AREA, DZ=DZT)
    print(f'{datetime.datetime.now()}  done calculations')

    # xr Datasets
    ds_g = da_g.to_dataset(name='OHC_global')
    ds_gl = da_gl.to_dataset(name='OHC_global_levels')
    ds_v = da_v.to_dataset(name='OHC_vertical')
    ds_va = da_va.to_dataset(name='OHC_vertical_above_100m')
    ds_vb = da_vb.to_dataset(name='OHC_vertical_below_100m')
    ds_z = da_z.to_dataset(name='OHC_zonal')
    ds_zl = da_zl.to_dataset(name='OHC_zonal_levels')
    print(f'{datetime.datetime.now()}  done dataset')

    print(f'output: {file_out}')

    ds_new = xr.merge([ds_g, ds_gl, ds_z, ds_zl, ds_v, ds_va, ds_vb])
    ds_new.to_netcdf(path=file_out, mode='w')
    print(f'{datetime.datetime.now()}  done\n')

    return ds_new
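As the banner suggests, the function expects a dask scheduler so that `xr.open_mfdataset(..., parallel=True)` can read files concurrently; a minimal setup sketch using exactly the calls it prints:

from dask.distributed import Client, LocalCluster

cluster = LocalCluster(n_workers=2)
client = Client(cluster)
ds_new = OHC_parallel(run='ctrl', mask_nr=0)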
Example #14
def make_SFWF_surface_integrals():
    """ calculate SFWF surface integrals for specific regions
    E/P/R/T .. evap, precip, runoff, total
    m/t     .. mean/trend

    ! output as a pickled dictionary
    """
    ds_ctrl = xr.open_dataset(
        f'{path_prace}/ctrl/EVAP_F_PREC_F_ROFF_F_ctrl_mean_200-229.nc')
    ds_lpd = xr.open_dataset(
        f'{path_prace}/lpd/EVAP_F_PREC_F_ROFF_F_lpd_mean_500-529.nc')
    Tm_ctrl = xr.open_dataarray(f'{path_prace}/ctrl/SFWF_ctrl_mean_200-229.nc')
    Tm_lpd = xr.open_dataarray(f'{path_prace}/lpd/SFWF_lpd_mean_500-529.nc')
    Et_rcp = xr.open_dataarray(
        f'{path_prace}/rcp/EVAP_F_yrly_trend_rcp.nc')  # 2D data
    Pt_rcp = xr.open_dataarray(f'{path_prace}/rcp/PREC_F_yrly_trend_rcp.nc')
    Rt_rcp = xr.open_dataarray(f'{path_prace}/rcp/ROFF_F_yrly_trend_rcp.nc')
    Tt_rcp = xr.open_dataarray(f'{path_prace}/rcp/SFWF_yrly_trend_rcp.nc')
    Et_lr1 = xr.open_dataarray(f'{path_prace}/lr1/EVAP_F_yrly_trend_lr1.nc')
    Pt_lr1 = xr.open_dataarray(f'{path_prace}/lr1/PREC_F_yrly_trend_lr1.nc')
    Rt_lr1 = xr.open_dataarray(f'{path_prace}/lr1/ROFF_F_yrly_trend_lr1.nc')
    Tt_lr1 = xr.open_dataarray(
        f'{path_prace}/lr1/SFWF_yrly_trend_lr1.nc')  # for now only sum

    for i, sim in enumerate(['HIGH', 'LOW']):
        print('-----', sim, '-----')
        d = {}
        (Em, Pm, Rm,
         Tm) = [(ds_ctrl.EVAP_F, ds_ctrl.PREC_F, ds_ctrl.ROFF_F, Tm_ctrl),
                (ds_lpd.EVAP_F, ds_lpd.PREC_F, ds_lpd.ROFF_F, Tm_lpd)][i]
        (Et, Pt, Rt, Tt) = [(Et_rcp, Pt_rcp, Rt_rcp, Tt_rcp),
                            (Et_lr1, Pt_lr1, Rt_lr1, Tt_lr1)][i]
        AREA = xr_AREA(domain=['ocn', 'ocn_low'][i])

        for (latS, latN) in lat_bands:
            MASK = make_Mask(run=['ctrl', 'lpd'][i], latS=latS, latN=latN)
            AREA_total = AREA.where(MASK).sum()

            # integrals of mean
            Pmi = (Pm.where(MASK) * AREA).sum().values
            Emi = (Em.where(MASK) * AREA).sum().values
            Rmi = (Rm.where(MASK) * AREA).sum().values
            Tmi = (Tm.where(MASK) * AREA).sum().values
            d['Pmi_mmd'] = Pmi / AREA_total.values * 24 * 3600  # [kg/s] -> [mm/d]
            d['Emi_mmd'] = Emi / AREA_total.values * 24 * 3600
            d['Rmi_mmd'] = Rmi / AREA_total.values * 24 * 3600
            d['Tmi_mmd'] = Tmi / AREA_total.values * 24 * 3600
            d['Pmi_Sv'] = Pmi / 1e9  # [kg/s] -> [Sv]
            d['Emi_Sv'] = Emi / 1e9
            d['Rmi_Sv'] = Rmi / 1e9
            d['Tmi_Sv'] = Tmi / 1e9

            # integrals of trends
            Pti = (Pt.where(MASK) * AREA).sum().values
            Eti = (Et.where(MASK) * AREA).sum().values
            Rti = (Rt.where(MASK) * AREA).sum().values
            Tti = (Tt.where(MASK) * AREA).sum().values
            d['Pti_mmd'] = Pti / AREA_total.values * 24 * 3600 * 365 * 100  # [mm/d/100yr]
            d['Eti_mmd'] = Eti / AREA_total.values * 24 * 3600 * 365 * 100
            d['Rti_mmd'] = Rti / AREA_total.values * 24 * 3600 * 365 * 100
            d['Tti_mmd'] = Tti / AREA_total.values * 24 * 3600 * 365 * 100
            d['Pti_Sv'] = Pti / 1e9 * 365 * 100  # [Sv/100yr]
            d['Eti_Sv'] = Eti / 1e9 * 365 * 100
            d['Rti_Sv'] = Rti / 1e9 * 365 * 100
            d['Tti_Sv'] = Tti / 1e9 * 365 * 100

            print(f'\n{latS}N to {latN}N,   {AREA_total.values:4.2E} m^2\n')
            print('             PREC  EVAP  ROFF  TOTAL   DIFF')

            print(
                f'[mm/d]      {d["Pmi_mmd"]:5.2f} {d["Emi_mmd"]:5.2f} {d["Rmi_mmd"]:5.2f} {d["Tmi_mmd"]:7.4f} {d["Tmi_mmd"]-(d["Pmi_mmd"]+d["Emi_mmd"]+d["Rmi_mmd"]):7.4f}'
            )
            print(
                f'[mm/d/100y] {d["Pti_mmd"]:5.2f} {d["Eti_mmd"]:5.2f} {d["Rti_mmd"]:5.2f} {d["Tti_mmd"]:7.4f} {d["Tti_mmd"]-(d["Pti_mmd"]+d["Eti_mmd"]+d["Rti_mmd"]):7.4f}'
            )
            print(
                f'[Sv]        {d["Pmi_Sv"]:5.2f} {d["Emi_Sv"]:5.2f} {d["Rmi_Sv"]:5.2f} {d["Tmi_Sv"]:7.4f} {d["Tmi_Sv"]-(d["Pmi_Sv"]+d["Emi_Sv"]+d["Rmi_Sv"]):7.4f}'
            )
            print(
                f'[Sv/100y]   {d["Pti_Sv"]:5.2f} {d["Eti_Sv"]:5.2f} {d["Rti_Sv"]:5.2f} {d["Tti_Sv"]:7.4f} {d["Tti_Sv"]-(d["Pti_Sv"]+d["Eti_Sv"]+d["Rti_Sv"]):7.4f}'
            )
            print(
                f'[%/100y]    {d["Pti_Sv"]/d["Pmi_Sv"]*100:5.1f} {d["Eti_Sv"]/d["Emi_Sv"]*100:5.1f} {d["Rti_Sv"]/d["Rmi_Sv"]*100:5.1f} {d["Tti_Sv"]/d["Tmi_Sv"]*100:5.1f}\n'
            )
            print(
                f'total surface flux:   {d["Tmi_Sv"]:5.2f} Sv  {d["Tti_Sv"]:5.2f} Sv/100yr  {d["Tti_Sv"]/d["Tmi_Sv"]*100:5.1f} %/100yr'
            )
            print('\n\n\n')
            fn = f'{path_results}/SFWF/Atlantic_SFWF_integrals_{sim}_{latS}N_{latN}N'
            save_obj(d, fn)
    return
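The conversions rely on 1 kg/m^2 of freshwater = 1 mm of water column, and 1 Sv = 10^6 m^3/s ≈ 10^9 kg/s of freshwater; a small worked check of both factors:

kg_per_s = 3.5e8   # area-integrated flux [kg/s]
area_m2  = 4.0e13  # integration area [m^2]

mm_per_day = kg_per_s / area_m2 * 24 * 3600  # kg/m^2/s (= mm/s) -> mm/d
sverdrup   = kg_per_s / 1e9                  # kg/s -> Sv, with rho_freshwater = 1000 kg/m^3
print(f'{mm_per_day:.3f} mm/d, {sverdrup:.2f} Sv')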
Example #15
import sys
import datetime
sys.path.append("..")
import xarray as xr
from paths import path_prace
from regions import boolean_mask, Atlantic_mask
from timeseries import IterateOutputCESM
from xr_DataArrays import xr_AREA


for j, run in enumerate(['ctrl','lc1']):
    domain = ['ocn', 'ocn_low'][j]
    TAREA = xr_AREA(domain=domain)
    mask_A = Atlantic_mask(domain=domain)
    mask_P = boolean_mask(domain=domain, mask_nr=2)
    mask_S = boolean_mask(domain=domain, mask_nr=1)
    shf, shf_A, shf_P, shf_S = [], [], [], []
    for i, (y,m,f) in enumerate(IterateOutputCESM(domain=domain, run=run, tavg='monthly')):
        da = xr.open_dataset(f, decode_times=False).SHF*TAREA
        shf.append(da.sum())
        shf_A.append(da.where(mask_A).sum())
        shf_P.append(da.where(mask_P).sum())
        shf_S.append(da.where(mask_S).sum())
    shf = xr.concat(shf, dim='time')
    shf_A = xr.concat(shf_A, dim='time')
    shf_P = xr.concat(shf_P, dim='time')
    shf_S = xr.concat(shf_S, dim='time')
    shf.name = 'Global_Ocean'
    shf_A.name = 'Atlantic_Ocean'
    shf_P.name = 'Pacific_Ocean'
    shf_S.name = 'Southern_Ocean'