# Example 1 (scraped snippet header; original text: "Esempio n. 1" / "0")
def read_smos_sss_here():
    """Area-weighted SMOS sea-surface salinity averaged over the western
    tropical Atlantic box (65W-50W, 3S-20N); one value per time step."""
    path = '/stormtrack/data4/yliang/observations/SMOS/SMOS_SSS_2010_2016_V3.nc'
    nc = Dataset(path, 'r')
    # Skip the first 7 records; flip latitude to ascending order.
    sss = nc.variables['sss_biasadj'][7:, ::-1, :].data
    lon_sss = nc.variables['lon'][:].data
    lat_sss = nc.variables['lat'][::-1].data
    nc.close()

    area_sss = data_process_f.area_calculate_nonuniform(lon_sss, lat_sss)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(-65, -50, -3, 20,
                                                         lon_sss, lat_sss)

    print(sss.shape)
    nt = sss.shape[0]
    ts_sss_smos = np.zeros((nt))
    for step in range(nt):
        box = sss[step, y1:y2 + 1, x1:x2 + 1]
        cell = area_sss[y1:y2 + 1, x1:x2 + 1]
        # box/box is 1 where data exist and NaN elsewhere, so the
        # denominator accumulates area only over valid points.
        ts_sss_smos[step] = np.nansum(box * cell) / np.nansum(cell * box / box)

    return ts_sss_smos
def read_gleam_evap_here(year_st,year_ed,river_mask):
    """Amazon-basin-mean GLEAM evaporation as a monthly time series.

    year_st, year_ed : first and last year (inclusive) to read.  Month
        indexing is relative to year_ref = 1979 even though the file is
        labelled 1980-2017 -- NOTE(review): confirm this one-year offset
        is intentional.
    river_mask : basin mask on the 721 x 348 reference grid (longitudes
        spanning [-180, 180]); 1 inside the basin.
    Returns a 1-D array with one basin-mean value per month.
    """
    print('read gleam evaporation')
    year_ref = 1979
    # First and one-past-last month indices into the file's time axis.
    t0 = (year_st-year_ref)*12
    t1 = (year_ed-year_ref)*12+12

    dirname = '/stormtrack/data4/yliang/observations/GLEAM/'
    filename = 'E_monthly_GLEAM_1980_2017.nc'
    f = Dataset(dirname + filename, 'r')
    evap = f.variables['E'][t0:t1,:,:].data
    lon_evap = f.variables['lon'][:].data
    lat_evap = f.variables['lat'][:].data
    f.close()

    # Nearest-neighbour regrid of the basin mask from the reference grid
    # onto the GLEAM grid; keep only cells equal to exactly 1.
    [grid_x, grid_y] = np.meshgrid(np.linspace(-180,180,721), np.linspace(-90,84,348))
    [lon_x, lat_y] = np.meshgrid(lon_evap, lat_evap)
    mask_rg = griddata((grid_y.ravel(), grid_x.ravel()), np.squeeze(river_mask).ravel(), (lat_y, lon_x), method='nearest')
    mask_rg[mask_rg!=1] = np.nan

    # Swap the two longitude halves of the mask (720 columns each),
    # apparently converting between [-180, 180] and [0, 360] ordering --
    # NOTE(review): this assumes the GLEAM grid has exactly 1440 longitude
    # columns; confirm against the file.
    mask_tmp = mask_rg.copy()*0.
    mask_tmp[:,0:720] = mask_rg[:,720:].copy()
    mask_tmp[:,720:] = mask_rg[:,0:720].copy()

    area = data_process_f.area_calculate_nonuniform(lon_evap, lat_evap)

    # Area-weighted basin mean per month; nansum skips cells outside the
    # (shifted) mask, which are NaN.
    ts_evap = np.zeros((evap.shape[0]))
    for NT in range(evap.shape[0]):
        ts_evap[NT] = np.nansum(evap[NT,:,:]*area*mask_tmp)/np.nansum(area*mask_tmp)

    return ts_evap
# Example 3 (scraped snippet header; original text: "Esempio n. 3" / "0")
def read_oras4_salinity(year_st, year_ed, mask_sss, lon_sss, lat_sss):
    """Area-weighted ORAS4 salinity over the Amazon-plume box.

    Reads 'so' via ORAS4_data_process_f, regrids mask_sss (defined on
    lon_sss/lat_sss) onto the ORAS4 grid by nearest neighbour, and
    returns a 1-D monthly time series.
    """
    [so_oras, lon_oras, lat_oras, depth_oras] = \
        ORAS4_data_process_f.read_variable(0, 360, -30, 30, 0,
                                           year_st, year_ed, 'so')

    print(depth_oras)

    # Regrid the input SSS mask onto the ORAS4 horizontal grid.
    [src_x, src_y] = np.meshgrid(lon_sss, lat_sss)
    [dst_x, dst_y] = np.meshgrid(lon_oras, lat_oras)
    mask_rg = griddata((src_y.ravel(), src_x.ravel()), mask_sss.ravel(),
                       (dst_y, dst_x), method='nearest')

    # Amazon-plume box boundaries (deg E / deg N).
    lon_amz1 = 295
    lon_amz2 = 311
    lat_amz1 = -3
    lat_amz2 = 15

    area_so = data_process_f.area_calculate_nonuniform(lon_oras, lat_oras)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_oras, lat_oras)

    nt = so_oras.shape[0]
    ts_so = np.zeros((nt))
    for NT in range(nt):
        sal = so_oras[NT, y1:y2 + 1, x1:x2 + 1]
        cell = area_so[y1:y2 + 1, x1:x2 + 1]
        msk = mask_rg[y1:y2 + 1, x1:x2 + 1]
        ts_so[NT] = np.nansum(sal * cell * msk) / np.nansum(cell * msk)

    return ts_so
def read_en4_salinity(year_st,year_ed,mask_sss,lon_sss,lat_sss):
    """Area-weighted EN4 surface salinity over the Amazon-plume box."""
    print('EN4 salinity')

    base = 1979
    i0 = (year_st - base) * 12
    i1 = (year_ed - base) * 12 + 12

    filename = '/stormtrack/data4/yliang/observations/EN4/EN4_2_1/EN.4.2.1.f.analysis.g10.197901_201812.nc'
    nc = Dataset(filename, 'r')
    lon_en4 = nc.variables['lon'][:].data
    lat_en4 = nc.variables['lat'][:].data
    # Top depth level (index 0) only.
    sss = nc.variables['salinity'][i0:i1, 0, :, :].data
    nc.close()

    # Nearest-neighbour regrid of the supplied mask onto the EN4 grid.
    [src_x, src_y] = np.meshgrid(lon_sss, lat_sss)
    [dst_x, dst_y] = np.meshgrid(lon_en4, lat_en4)
    mask_rg = griddata((src_y.ravel(), src_x.ravel()), mask_sss.ravel(),
                       (dst_y, dst_x), method='nearest')

    # Amazon-plume box (deg E / deg N).
    lon_amz1 = 295
    lon_amz2 = 311
    lat_amz1 = -3
    lat_amz2 = 15

    area_so = data_process_f.area_calculate_nonuniform(lon_en4, lat_en4)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_en4, lat_en4)

    nt = sss.shape[0]
    ts_so = np.zeros((nt))
    for NT in range(nt):
        sal = sss[NT, y1:y2 + 1, x1:x2 + 1]
        cell = area_so[y1:y2 + 1, x1:x2 + 1]
        msk = mask_rg[y1:y2 + 1, x1:x2 + 1]
        ts_so[NT] = np.nansum(sal * cell * msk) / np.nansum(cell * msk)

    return ts_so
def read_precl_precipitation(year_st,year_ed,river_mask):
    """Amazon-basin-mean PREC/L precipitation as a monthly time series."""
    print('read precl precipitation')
    base = 1948
    t0 = (year_st-base)*12
    t1 = (year_ed-base)*12+12

    data_dir = '/stormtrack/data4/yliang/observations/PRECL/'
    fname = 'PRECL.mon.mean.1x1.nc'
    nc = Dataset(data_dir + fname, mode='r')
    lat = np.flipud(nc.variables['lat'][:].data)
    lon = nc.variables['lon'][:].data
    precip = nc.variables['precip'][t0:t1,:,:].data
    nc.close()
    # Reverse the latitude axis of the data to match the flipped lat vector.
    precip = precip[:, ::-1, :]

    # Nearest-neighbour regrid of the basin mask (721 x 348 reference grid)
    # onto the PREC/L grid; keep only cells equal to exactly 1.
    [ref_x, ref_y] = np.meshgrid(np.linspace(0,360,721), np.linspace(-90,84,348))
    [dst_x, dst_y] = np.meshgrid(lon, lat)
    mask_rg = griddata((ref_y.ravel(), ref_x.ravel()), np.squeeze(river_mask).ravel(), (dst_y, dst_x), method='nearest')
    mask_rg[mask_rg!=1] = np.nan

    area = data_process_f.area_calculate_nonuniform(lon, lat)

    nt = precip.shape[0]
    ts_out = np.zeros((nt))
    for month in range(nt):
        ts_out[month] = np.nansum(precip[month,:,:]*area*mask_rg)/np.nansum(area*mask_rg)

    return ts_out
def read_aquarius_salinity():
    """Area-weighted Aquarius CAP v5.0 SSS over the Amazon-plume box.

    Reads 36 monthly files (2012-2014) and returns a 1-D array with one
    box-mean salinity value per month.
    """
    dirname = '/stormtrack/data4/yliang/observations/Aquarius/'
    filename = 'sss201304.v5.0cap.nc'
    f = Dataset(dirname + filename, 'r')
    lon_sss = f.variables['lon'][:].data
    lat_sss = f.variables['lat'][:].data
    f.close()

    # 3 years x 12 months of CAP sea-surface salinity.
    sss_tmp = np.zeros((36,len(lat_sss),len(lon_sss)))
    NN = 0
    for NY in range(3):
        for NM in range(12):
            filename = 'sss' + str(2012+NY) + str(NM+1).zfill(2) + '.v5.0cap.nc'
            f = Dataset(dirname + filename, 'r')
            sss_tmp[NN,:,:] = f.variables['sss_cap'][:,:].data.squeeze()
            # BUG FIX: each monthly file was opened but never closed,
            # leaking 36 file handles.
            f.close()
            NN = NN + 1

    # Large negative values are the missing-value fill.
    sss_tmp[sss_tmp<-100] = np.nan

    # 1 over points valid in the time mean, NaN elsewhere (climatology is
    # now computed once instead of twice).
    clim = np.nanmean(sss_tmp, axis=0)
    mask_rg = clim / clim

    # Amazon-plume box (deg E / deg N).
    lon_amz1 = 295
    lon_amz2 = 311
    lat_amz1 = -3
    lat_amz2 = 15

    area_so = data_process_f.area_calculate_nonuniform(lon_sss, lat_sss)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(lon_amz1,lon_amz2,lat_amz1,lat_amz2,lon_sss,lat_sss)

    # Area-weighted, mask-weighted mean salinity inside the box per month.
    ts_so = np.zeros((sss_tmp.shape[0]))
    for NT in range(len(ts_so)):
        ts_so[NT] = np.nansum(sss_tmp[NT,y1:y2+1,x1:x2+1]*area_so[y1:y2+1,x1:x2+1]*mask_rg[y1:y2+1,x1:x2+1])/np.nansum(area_so[y1:y2+1,x1:x2+1]*mask_rg[y1:y2+1,x1:x2+1])

    return ts_so
def read_gecco2_salinity(year_st, year_ed):
    """Area-weighted GECCO2 sea-surface salinity over the Amazon-plume box.

    Returns (ts_so, lat, lon, so_mask): the monthly time series, the grid
    vectors, and a 0/1 mask marking the averaging box over ocean points.

    NOTE(review): month indexing is relative to year_ref = 1979, so
    year_st must be >= 1979 and within the file's time span.
    """
    print('read GECCO2 salinity')

    year_ref = 1979

    yearN = int(year_ed - year_st + 1)
    nt = int(yearN * 12)

    # Amazon-plume box (deg E / deg N).
    lon_amz1 = 295
    lon_amz2 = 310
    lat_amz1 = -3
    lat_amz2 = 15

    # First / one-past-last month indices into the file's time axis.
    t0 = (year_st - year_ref) * 12
    t1 = (year_ed - year_ref) * 12 + 12

    dirname = '/stormtrack/data4/yliang/observations/GECCO2/regrided/'
    filename = 'GECCO2_S_regridded.nc'

    f = Dataset(dirname + filename, 'r')
    lat = f.variables['lat'][:].data.copy()
    lon = f.variables['lon'][:].data.copy()
    so = f.variables['salt_rg'][t0:t1, :, :].data.copy()
    f.close()

    # Zeroed template on the grid; *0. keeps NaN where month index 111 is
    # NaN -- NOTE(review): assumes at least 112 months were read; confirm
    # for short year ranges.
    so_mask = so[111, :, :].copy() * 0.
    so_mask_tmp = so_mask.copy()
    so_mask_tmp[np.isnan(so_mask_tmp) == False] = 1

    area_so = data_process_f.area_calculate_nonuniform(lon, lat)
    # Blank land cells (NaN salinity at month index 1) in the area weights.
    area_so[np.isnan(so[1, :, :]) == 1] = np.nan
    [x1, x2, y1,
     y2] = data_process_f.find_lon_lat_index(lon_amz1, lon_amz2, lat_amz1,
                                             lat_amz2, lon, lat)

    # Area-weighted mean salinity inside the box per month; nansum skips
    # the NaN (land) cells.
    ts_so = np.zeros((nt))
    for NT in range(nt):
        ts_so[NT] = np.nansum(so[NT, y1:y2 + 1, x1:x2 + 1] *
                              area_so[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
                                  area_so[y1:y2 + 1, x1:x2 + 1])

    # Mark the averaging box in the output mask.  The y1-1 start extends
    # one row south of the box used for the average -- NOTE(review):
    # possible off-by-one, confirm intent.
    so_mask[y1 - 1:y2 + 1, x1:x2 + 1] = 1.
    so_mask = so_mask_tmp * so_mask
    so_mask[np.isnan(so_mask) == True] = 0.

    return ts_so, lat, lon, so_mask
# Example 8 (scraped snippet header; original text: "Esempio n. 8" / "0")
def read_gecco2_salinity(year_st, year_ed):
    """Area-weighted GECCO2 SSS over the Amazon-plume box, computed on a
    -30..30 deg latitude sub-band of the regridded file.

    Returns (ts_so, lon, lat, mask_so): the monthly series, the grid
    vectors of the sub-band, and a 1/NaN ocean mask on that sub-band.

    NOTE(review): this redefines read_gecco2_salinity -- if both versions
    are kept in one module, this definition shadows the earlier one.
    """
    print('read GECCO2 salinity')

    year_ref = 1979

    yearN = int(year_ed - year_st + 1)
    nt = int(yearN * 12)

    # Amazon-plume box (deg E / deg N).
    lon_amz1 = 295
    lon_amz2 = 311
    lat_amz1 = -3
    lat_amz2 = 15

    # First / one-past-last month indices relative to year_ref.
    t0 = (year_st - year_ref) * 12
    t1 = (year_ed - year_ref) * 12 + 12

    dirname = '/stormtrack/data4/yliang/observations/GECCO2/regrided/'
    filename = 'GECCO2_S_regridded.nc'

    f = Dataset(dirname + filename, 'r')
    lat_tmp = f.variables['lat'][:].data.copy()
    lon = f.variables['lon'][:].data.copy()
    so_tmp = f.variables['salt_rg'][t0:t1, :, :].data.copy()
    f.close()

    # Restrict to the -30..30 latitude band.  The 0/11 longitude bounds
    # look like placeholders -- only y1:y2 is used below.
    [x1, x2, y1,
     y2] = data_process_f.find_lon_lat_index(0, 11, -30, 30, lon, lat_tmp)

    lat = lat_tmp[y1:y2 + 1].copy()
    so = so_tmp[:, y1:y2 + 1, :].copy()

    # 1 over points valid in the time mean, NaN elsewhere.
    mask_so = np.nanmean(so, axis=0).copy() / np.nanmean(so, axis=0).copy()

    area_so = data_process_f.area_calculate_nonuniform(lon, lat)
    # Blank land cells (NaN salinity at month index 1) in the area weights.
    area_so[np.isnan(so[1, :, :]) == 1] = np.nan
    [x1, x2, y1,
     y2] = data_process_f.find_lon_lat_index(lon_amz1, lon_amz2, lat_amz1,
                                             lat_amz2, lon, lat)

    # Area-weighted, mask-weighted mean salinity inside the box per month.
    ts_so = np.zeros((nt))
    for NT in range(nt):
        ts_so[NT] = np.nansum(
            so[NT, y1:y2 + 1, x1:x2 + 1] * area_so[y1:y2 + 1, x1:x2 + 1] *
            mask_so[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
                area_so[y1:y2 + 1, x1:x2 + 1] * mask_so[y1:y2 + 1, x1:x2 + 1])

    return ts_so, lon, lat, mask_so
def sss_calculate_here(TLONG, TLAT, sss_test):
    """Regrid model SSS to a regular grid, average it over the Amazon-plume
    box, and return the linear trend of the smoothed seasonal amplitude.

    Parameters
    ----------
    TLONG, TLAT : 2-D curvilinear model coordinates.
    sss_test : 3-D (time, y, x) model sea-surface salinity; the time axis
        is assumed to be whole years of monthly data -- TODO confirm.

    Returns
    -------
    (sss_test_trend, sss_test_sig, pvalue) from the t-tested regression.

    NOTE(review): relies on module-level names N (running-mean window,
    assumed odd), sig_level, max_min_select and statistical_f.
    """
    # Regular target grid (~1 degree).
    lon = np.linspace(0, 360, 360)
    lat = np.linspace(-79, 89, 180)
    [gridx_out, gridy_out] = np.meshgrid(lon, lat)
    ny = 180
    nx = 360
    [gridx_in, gridy_in] = [TLONG, TLAT]
    nt = sss_test.shape[0]

    # Nearest-neighbour regrid, one time slice at a time.
    sss_test_interp = np.zeros((nt, ny, nx))
    for NT in range(nt):
        print('Ocean Salinity')
        print(NT)
        sss_test_interp[NT, :, :] = griddata(
            (gridy_in.ravel(), gridx_in.ravel()),
            np.squeeze(sss_test[NT, :, :]).ravel(), (gridy_out, gridx_out),
            method='nearest')

    # Amazon-plume box (deg E / deg N).
    lon_amz1 = 295
    lon_amz2 = 311
    lat_amz1 = -3
    lat_amz2 = 15

    # 1 over valid ocean points, NaN elsewhere (time index 11 as reference).
    mask_sss = (sss_test_interp[11, :, :] / sss_test_interp[11, :, :]).copy()

    [x1, x2, y1,
     y2] = data_process_f.find_lon_lat_index(lon_amz1, lon_amz2, lat_amz1,
                                             lat_amz2, lon, lat)
    area_so = data_process_f.area_calculate_nonuniform(lon, lat)
    ts_sss_test = np.zeros((nt))
    for NT in range(nt):
        # BUG FIX: the ocean mask was applied only in the denominator,
        # making numerator and denominator inconsistent wherever the box
        # contains points invalid at the reference month.  Apply it to
        # both, matching the convention used elsewhere in this module.
        ts_sss_test[NT] = np.nansum(
            sss_test_interp[NT, y1:y2 + 1, x1:x2 + 1] *
            area_so[y1:y2 + 1, x1:x2 + 1] *
            mask_sss[y1:y2 + 1, x1:x2 + 1]) / np.nansum(
                area_so[y1:y2 + 1, x1:x2 + 1] * mask_sss[y1:y2 + 1, x1:x2 + 1])

    # N-point centred running mean; the endpoints keep the raw series.
    ts_rmean = ts_sss_test.copy()
    ts_rmean[int((N - 1) / 2):-int((N - 1) / 2)] = np.convolve(
        ts_sss_test.copy(), np.ones((N, )) / N, mode='valid')

    nyear = nt // 12
    sss_monthly = ts_rmean.reshape((nyear, 12))
    [sss_max_test, sss_min_test] = max_min_select(sss_monthly, nyear)

    # BUG FIX: np.linspace requires an integer sample count; nt / 12 is a
    # float and raises TypeError on modern numpy.
    [sss_test_trend, sss_test_sig, pvalue
     ] = statistical_f.linear_regression_1D_t_test_with_sample_size_adjusted(
         np.linspace(1, nyear, nyear)[0:-2],
         sss_max_test[1:-1] - sss_min_test[1:-1], sig_level)

    return sss_test_trend, sss_test_sig, pvalue
# Example 10 (scraped snippet header; original text: "Esempio n. 10" / "0")
def read_gpcp_precipitation(year_st, year_ed, river_mask):
    """Amazon-basin-mean GPCP precipitation.

    Regrids river_mask (given on the 721 x 348 reference grid) onto the
    GPCP grid and returns (monthly basin-mean series, GPCP cell areas).
    """
    print('read gpcp precipitation')
    base = 1979
    t0 = (year_st - base) * 12
    t1 = (year_ed - base) * 12 + 12

    data_dir = '/stormtrack/data4/yliang/observations/GPCP/'
    fname = 'precip.mon.mean.nc'
    nc = Dataset(data_dir + fname, mode='r')
    lat = nc.variables['lat'][:].data
    lon = nc.variables['lon'][:].data
    precip = nc.variables['precip'][t0:t1, :, :].data
    nc.close()

    # Nearest-neighbour regrid of the basin mask onto the GPCP grid; keep
    # only cells equal to exactly 1.
    [ref_x, ref_y] = np.meshgrid(np.linspace(0, 360, 721),
                                 np.linspace(-90, 84, 348))
    [dst_x, dst_y] = np.meshgrid(lon, lat)
    mask_rg = griddata((ref_y.ravel(), ref_x.ravel()),
                       np.squeeze(river_mask).ravel(), (dst_y, dst_x),
                       method='nearest')
    mask_rg[mask_rg != 1] = np.nan

    area = data_process_f.area_calculate_nonuniform(lon, lat)

    nt = precip.shape[0]
    ts_gpcp = np.zeros((nt))
    for month in range(nt):
        ts_gpcp[month] = np.nansum(
            precip[month, :, :] * area * mask_rg) / np.nansum(area * mask_rg)

    return ts_gpcp, area
def read_ecco_salinity(year_st, year_ed, mask_sss, lon_sss, lat_sss):
    """Area-weighted ECCO v4r4 surface salinity over the Amazon-plume box.

    Note: the series always starts at 1992 (year_ref); year_st, mask_sss,
    lon_sss and lat_sss are accepted for call-site compatibility but are
    not used in the computation.
    """
    year_ref = 1992
    yearN = int(year_ed - year_ref + 1)

    # Grid vectors from a sample file (lon/lat are stored as 2-D arrays).
    filename = '/stormtrack/data4/yliang/observations/ECCO/SALT.2011.nc'
    nc = Dataset(filename, 'r')
    lon2d = nc.variables['lon'][:, :].data
    lat2d = nc.variables['lat'][:, :].data
    nc.close()

    lon_ecco = lon2d[1, :].copy()
    lat_ecco = lat2d[:, 1].copy()

    ny = len(lat_ecco)
    nx = len(lon_ecco)

    # Surface level (k = 0) of salinity, one field per month.
    sss = np.zeros((yearN * 12, ny, nx))
    for NY in range(yearN):
        for NM in range(12):
            filename = '/stormtrack/data4/yliang/observations/ECCO/ECCO_v4r4/SALT_' + str(
                int(NY + year_ref)) + '_' + str(NM + 1).zfill(2) + '.nc'
            nc = Dataset(filename, 'r')
            sss[NY * 12 + NM, :, :] = nc.variables['SALT'][:, 0, :, :].data.squeeze()
            nc.close()

    # Amazon-plume box (deg E in [-180, 180] convention / deg N).
    lon_amz1 = -65
    lon_amz2 = -49
    lat_amz1 = -3
    lat_amz2 = 15

    area_so = data_process_f.area_calculate_nonuniform(lon_ecco, lat_ecco)
    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        lon_amz1, lon_amz2, lat_amz1, lat_amz2, lon_ecco, lat_ecco)

    # 1 over points valid in the time mean, NaN elsewhere.
    mask_rg = np.nanmean(sss, axis=0).copy() / np.nanmean(sss, axis=0).copy()

    nt = sss.shape[0]
    ts_so = np.zeros((nt))
    for NT in range(nt):
        sal = sss[NT, y1:y2 + 1, x1:x2 + 1]
        cell = area_so[y1:y2 + 1, x1:x2 + 1]
        msk = mask_rg[y1:y2 + 1, x1:x2 + 1]
        ts_so[NT] = np.nansum(sal * cell * msk) / np.nansum(cell * msk)

    return ts_so
def sss_calculate_here(TLONG, TLAT, sss_test):
    """Regrid model SSS onto a regular grid and return its area-weighted,
    mask-weighted mean over the Amazon-plume box as a monthly series."""
    # Regular target grid (~1 degree).
    tgt_lon = np.linspace(0, 360, 360)
    tgt_lat = np.linspace(-79, 89, 180)
    [out_x, out_y] = np.meshgrid(tgt_lon, tgt_lat)
    nt = sss_test.shape[0]

    # Nearest-neighbour regrid from the curvilinear model grid, one time
    # slice at a time.
    regridded = np.zeros((nt, 180, 360))
    for step in range(nt):
        print('Ocean Salinity')
        print(step)
        regridded[step, :, :] = griddata(
            (TLAT.ravel(), TLONG.ravel()),
            np.squeeze(sss_test[step, :, :]).ravel(), (out_y, out_x),
            method='nearest')

    # Amazon-plume box (deg E / deg N).
    west = 295
    east = 311
    south = -3
    north = 15

    # 1 over valid ocean points, NaN elsewhere (time index 11 as reference).
    mask_sss = (regridded[11, :, :] / regridded[11, :, :]).copy()

    [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(
        west, east, south, north, tgt_lon, tgt_lat)
    area_so = data_process_f.area_calculate_nonuniform(tgt_lon, tgt_lat)

    ts_sss_test = np.zeros((nt))
    for step in range(nt):
        box = regridded[step, y1:y2 + 1, x1:x2 + 1]
        cell = area_so[y1:y2 + 1, x1:x2 + 1]
        msk = mask_sss[y1:y2 + 1, x1:x2 + 1]
        ts_sss_test[step] = np.nansum(box * cell * msk) / np.nansum(cell * msk)

    return ts_sss_test
# Example 13 (scraped snippet header; original text: "Esempio n. 13" / "0")
    if ast.literal_eval(args.run_flag) == True:

        # ================================================================
        # Read SODA SSS
        # ================================================================
        dirname = '/stormtrack/data4/yliang/observations/SODA/'
        filename = 'soda3.3.1_mn_ocean_reg_1980.nc'
        f = Dataset(dirname + filename, mode='r')
        lat = f.variables['latitude'][:].data
        lon = f.variables['longitude'][:].data
        # Surface level (depth index 0) of salinity for the template year.
        var_tmp = f.variables['salt'][:, 0, :, :].data.squeeze()
        # Values near 1e20 are the missing-value fill.
        var_tmp[abs(var_tmp) > 1.e19] = np.nan
        f.close()

        # 1 over ocean, NaN over land (x/x trick on the first month).
        mask_rg = var_tmp[0, :, :] / var_tmp[0, :, :]
        area = data_process_f.area_calculate_nonuniform(lon, lat)

        ny = len(lat)
        nx = len(lon)

        # Stack (year_N - 1) years of monthly surface salinity, starting
        # in 1980; year_N is defined elsewhere in the script.
        var_test = np.zeros(((year_N - 1) * 12, ny, nx))
        for NY in range(year_N - 1):
            filename = 'soda3.3.1_mn_ocean_reg_' + str(int(1980 + NY)) + '.nc'
            print(filename)
            f = Dataset(dirname + filename, mode='r')
            var_tmp = f.variables['salt'][:, 0, :, :].data.squeeze()
            var_tmp[abs(var_tmp) > 1.e19] = np.nan
            f.close()
            var_test[0 + NY * 12:12 + NY * 12, :, :] = var_tmp.copy()

# Calculate time series
# Example 14 (scraped snippet header; original text: "Esempio n. 14" / "0")
       f.close()

       # Diagnostic region: a near-global band rather than the Amazon box.
       lon_amz1 = 0
       lon_amz2 = 360
       lat_amz1 = -65
       lat_amz2 = 65

       # 1 over valid points, NaN elsewhere; time index 111 is used as the
       # reference snapshot -- NOTE(review): assumes >= 112 records.
       mask_sss = sss_uncertainty[111,:,:]/sss_uncertainty[111,:,:]

       area_so = data_process_f.area_calculate_nonuniform(lon_en4, lat_en4)
       [x1, x2, y1, y2] = data_process_f.find_lon_lat_index(lon_amz1,lon_amz2,lat_amz1,lat_amz2,lon_en4,lat_en4)

       # Area-weighted means of the EN4 uncertainty and observation-weight
       # fields inside the region, one value per time step.
       ts_uncertainty = np.zeros((sss_uncertainty.shape[0]))
       ts_obs_weight = np.zeros((sss_obs_weight.shape[0]))
       for NT in range(len(ts_uncertainty)):
           ts_uncertainty[NT] = np.nansum(sss_uncertainty[NT,y1:y2+1,x1:x2+1]*area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1])/np.nansum(area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1])
           ts_obs_weight[NT] = np.nansum(sss_obs_weight[NT,y1:y2+1,x1:x2+1]*area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1])/np.nansum(area_so[y1:y2+1,x1:x2+1]*mask_sss[y1:y2+1,x1:x2+1])

       # ================================================================
       # Plot figures
       # ================================================================
       fig = plt.figure()
       fig.set_size_inches(10, 10, forward=True)

       plt.axes([0.15, 0.55, 0.65, 0.35])

    # ================================================================
    # Read precipitation and river runoff
    # ================================================================
    # NOTE(review): the indentation level drops here -- this chunk appears
    # to come from a different scope of the original script; confirm
    # against the full source before running.
    dirname = '/stormtrack/data4/yliang/modelling/CESM2/CRU_V7_clm2/extracted/control/cycle1/'
    filename = 'P_R_1970-02.nc'
    f = Dataset(dirname + filename, mode='r')
    lon_in = f.variables['lon'][:].data
    lat_in = f.variables['lat'][:].data
    # River discharge field (squeezed to drop singleton dimensions).
    R_temp = f.variables['QCHANR'][:, :, :].data.squeeze()
    f.close()

    area_in = data_process_f.area_calculate_nonuniform(lon_in, lat_in)

    # Read the Amazon river-basin mask and regrid it (nearest neighbour)
    # from the 721 x 348 reference grid onto the model grid.
    river_mask = read_river_mask()
    lon_mask = np.linspace(0, 360, 721)
    lat_mask = np.linspace(-90, 84, 348)
    [grid_x, grid_y] = np.meshgrid(lon_mask, lat_mask)
    [grid_x_in, grid_y_in] = np.meshgrid(lon_in, lat_in)
    mask_rg = griddata((grid_y.ravel(), grid_x.ravel()),
                       np.squeeze(river_mask).ravel(), (grid_y_in, grid_x_in),
                       method='nearest')

    year_N = int(32)