# Calculate ET for Amazon sub-basins
import math
from datetime import datetime

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

import extract  # project module providing get_mask
# get_lats, get_lons, get_pixel_size and get_area_weighted_avg are assumed
# to be project helpers defined or imported elsewhere in this script.

basin_shapes = ['purus_dissolved', 'madeira_dissolved',
                'negro_dissolved', 'xingu_dissolved',
                'jari_dissolved', 'japura_dissolved',
                'branco_dissolved', 'aripuana_dissolved',
                'solimoes_dissolved5', 'tapajos_dissolved3',
                'amazon_obidos_dissolved']
# For each catchment: get the basin mask, calculate P, R and dS/dt, and use
# the water balance ET = P - R - dS/dt to estimate catchment ET.
results_dict = {}
areas = {}  # keep one entry per basin, so initialise outside the loop
out = {}

for basin in basin_shapes:
    basin_name = basin.split('_', 1)[0]

    # Get the basin mask on the precipitation grid and the 1-degree GRACE grid
    pre_lat = get_lats(pre)
    pre_lon = get_lons(pre)

    pre_mask = extract.get_mask(pre_lat, pre_lon, basin)
    onedeg_mask = extract.get_mask(grace_lat, grace_lon, basin)

    # Get area of basin in m2 (pre_pixel_size_grid, assumed to be defined
    # earlier in the script, holds the surface area of each grid cell in m2)
    area = np.sum(pre_mask * pre_pixel_size_grid)
    areas[basin_name] = area
    print(basin_name + ' basin = ' + str(area) + ' m2')
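    # For a regular lat/lon grid the cell areas behave roughly like
    # R**2 * cos(lat) * dlat * dlon (R = Earth radius, angles in radians),
    # so summing the areas of the masked cells gives the basin area in m2.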

    # First calculate precipitation over the catchment for May 2002 to Dec 2018
    Nt = pre.shape[0]
    P_basin = np.full(Nt, np.nan)
    shp_path = '/nfs/a68/gyjcab/datasets/shapefiles/'
    for nt in range(Nt):
        # Mask both the data and the cell-area weights to the basin; an index
        # offset would be added to nt here to align start dates (the original
        # comment suggests +9 to start at Jan 2003)
        masked_data = np.ma.array(pre.data[nt, :, :], mask=~pre_mask)
        masked_weights = np.ma.array(pre_pixel_size_grid, mask=~pre_mask)
        # Area-weighted basin-mean precipitation for this time step
        P_basin[nt] = np.ma.average(masked_data, weights=masked_weights)
# generate mask to extract data from basin
lats = get_lats(chirps_pr)
lons = get_lons(chirps_pr)
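
# A minimal sketch of what a mask helper like extract.get_mask might do:
# rasterise a basin shapefile onto the data grid via point-in-polygon tests.
# The names below (shp_dir, the '.shp' naming convention) are assumptions;
# the project's actual implementation may differ.
def get_mask_sketch(lats, lons, basin,
                    shp_dir='/nfs/a68/gyjcab/datasets/shapefiles/'):
    import geopandas as gpd
    from shapely.geometry import Point
    polygon = gpd.read_file(shp_dir + basin + '.shp').geometry.unary_union
    mask = np.zeros((len(lats), len(lons)), dtype=bool)
    for i, lat in enumerate(lats):
        for j, lon in enumerate(lons):
            mask[i, j] = polygon.contains(Point(lon, lat))
    return mask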

results_dict = {}
for basin in basin_shapes:
    print(basin)
    mask = extract.get_mask(lats, lons, basin)

    # Quick-look plot of the basin mask
    plt.figure()
    plt.imshow(1 * mask)
    plt.title(basin)

    # Surface area of each grid cell, tiled to a 2-D (lat, lon) grid
    pixel_size_grid = get_pixel_size(lons)
    pixel_size_grid = np.array([pixel_size_grid] * len(lons)).transpose()

    # Area-weighted CHIRPS precipitation over the basin, one value per step
    chirps_vals = []

    for yr in range(chirps_pr.shape[0]):
        data = chirps_pr[yr, :, :]
        amazon_mean = get_area_weighted_avg(data, mask, pixel_size_grid)
        chirps_vals.append(amazon_mean)
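    # get_area_weighted_avg presumably returns
    # sum(data * pixel_size_grid * mask) / sum(pixel_size_grid * mask),
    # i.e. each cell contributes in proportion to its surface area
    # (assumption; the helper is defined elsewhere in the project).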

    # Collect the basin series in a DataFrame (column name 'P' is assumed)
    chirps_df = pd.DataFrame({'P': chirps_vals})
    basin_name = basin.split('_', 1)[0]
    print(basin_name)
    data = catchment_et[basin_name]
    datemin = datetime(2003, 1, 1, 0, 0)
    datemax = datetime(2019, 12, 31, 0, 0)
    idx = pd.date_range(datemin, datemax, freq='MS')
    data = data.reindex(idx, fill_value=np.nan)
    print(data.head(5))
    print(data.tail(5))
    # Mask out unphysical negative ET values (avoids chained assignment)
    data.loc[data.ET < 0, 'ET'] = np.nan
    startyr = data.index[0].year
    endyr = data.index[-1].year
    nyear = endyr - startyr + 1
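    # nyear gives the number of calendar years spanned by the series and is
    # presumably used for annual statistics below; e.g. a hypothetical
    # monthly climatology over the record:
    # monthly_clim = data.ET.groupby(data.index.month).mean()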

    # --------------- Get basin mask on the precipitation grid
    pre_mask = extract.get_mask(pre_lat, pre_lon, basin)

    # Error in dS/dt
    # Use error values from Wiese et al. (2016) for the Amazon

    sigma_merr = 6.1  # GRACE measurement error
    sigma_lerr = 0.9  # residual leakage error after applying the CRI filter
    # and gain factors from the CLM model

    # Combine errors in quadrature
    sigma_S = math.sqrt(sigma_merr**2 + sigma_lerr**2)

    # Convert to the error in dS/dt: the derivative is a difference of two
    # storage estimates, so the two independent errors combine as sqrt(2)
    sigma_dSdt = sigma_S * math.sqrt(2)
    print('sigma_dSdt = ' + str(sigma_dSdt))
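    # With runoff and precipitation uncertainties in hand (hypothetical names
    # sigma_P and sigma_R, not defined in this script), the water-balance ET
    # error would follow by adding the independent terms in quadrature:
    # sigma_ET = math.sqrt(sigma_P**2 + sigma_R**2 + sigma_dSdt**2)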