Example #1
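# Assumed imports and inputs for this notebook fragment (names inferred from
# usage, not shown in the original cells): da_81 is a gridded sea-ice
# concentration DataArray and ds_region is the regional mask dataset, both
# loaded in earlier cells.
# import datetime
# import numpy as np
# from esio import metrics  # assumed location of the metrics module
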
# Get regional averages
da_81reg = metrics.agg_by_domain(da_grid=da_81, ds_region=ds_region)

# In[7]:

# Get reference dates: current time, 30 and 90 days ago, and the season start
ctime = np.datetime64(datetime.datetime.now())
lag_time_30days = ctime - np.timedelta64(30, 'D')
lag_time_90days = ctime - np.timedelta64(90, 'D')
last_sept = metrics.get_season_start_date(ctime)

# Select the last 30 days
da_81_30 = da_81.where(da_81.time >= lag_time_30days, drop=True)
# Aggregate over domain
da_81_30_avg = metrics.calc_extent(da_81_30, ds_region, fill_pole_hole=True)

# Select the last 90 days
da_81_3m = da_81.where(da_81.time >= lag_time_90days, drop=True)
# Aggregate over domain
da_81_3m_avg = metrics.calc_extent(da_81_3m, ds_region, fill_pole_hole=True)

# In[8]:

# # Test regional sums
# da_test_nolake = ((da_81>=0.15).astype('int') * ds_region.area).sum(dim='x').sum(dim='y')/(10**6)
# da_test = esio.calc_extent(da_81, ds_region, fill_pole_hole=True)
# %matplotlib inline
# da_81reg.sum(dim='nregions').plot(color='r', label='Sum of Regional Extents', linewidth=4)
# da_test_nolake.plot(color='b', label='Sum panArctic, Including Lakes')
# da_test.plot(color='k', label='Sum panArctic, Excluding Lakes')
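
# A minimal sketch of what metrics.calc_extent is assumed to compute, based on
# the commented-out test lines above and the pole-hole correction shown in
# Example #3. The 15% concentration threshold and the 10**6 unit conversion
# come from those lines; keeping the pole-hole mask on ds_region is an
# assumption (Example #3 keeps it on the SIC dataset instead).
def calc_extent_sketch(da_sic, ds_region, fill_pole_hole=False):
    """Sea-ice extent: total area (10^6 km^2) of cells with SIC >= 0.15."""
    extent = ((da_sic >= 0.15).astype('int') *
              ds_region.area).sum(dim='x').sum(dim='y') / (10**6)
    if fill_pole_hole:
        # Add the area under the satellite pole hole (assumed mask variable)
        extent = extent + (ds_region.hole_mask.astype('int') *
                           ds_region.area).sum(dim='x').sum(dim='y') / (10**6)
    return extent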
Example #2
def Update_Model_Aggs():
    '''Calculates pan-arctic and regional extents from different forecast models'''

    E = ed.EsioData.load()
    # Directories
    model_dir = E.model_dir

    # Define models to plot
    all_models = list(E.model.keys())
    all_models = [
        x for x in all_models if x not in ['piomas', 'MME', 'MME_NEW']
    ]  # remove some models
    #     all_models = ['uclsipn']
    runType = 'forecast'
    updateall = False

    ds_region = xr.open_mfdataset(
        os.path.join(E.grid_dir, 'sio_2016_mask_Update.nc')).load()

    for model in all_models:
        print(model)

        data_dir = E.model[model][runType]['sipn_nc']
        data_out = os.path.join(model_dir, model, runType, 'sipn_nc_agg')
        if not os.path.exists(data_out):
            os.makedirs(data_out)

        all_files = glob.glob(os.path.join(data_dir, '*.nc'))
        print("Found ", len(all_files), " files.")
        if updateall:
            print("Updating all files...")
        else:
            print("Only updating new files")

        # Remove any "empty" files (sometimes happens with ECMWF downloads)
        all_files_new = []
        for cf in all_files:
            if os.stat(cf).st_size > 0:
                all_files_new.append(cf)
            else:
                print("Found empty file: ", cf,
                      ". Consider deleting or redownloading.")
        all_files = sorted(all_files_new)  # Replace and sort

        # For each file
        for cf in all_files:
            # Check if already imported and skip (unless updateall flag is True)
            # Always import the most recent two months of files (because they get updated)
            f_out = os.path.join(data_out,
                                 os.path.basename(cf))  # netcdf file out
            if not updateall:
                if os.path.isfile(f_out) and (cf not in all_files[-2:]):
                    print("Skipping ", os.path.basename(cf),
                          " already imported.")
                    continue  # Skip, file already imported

            ds = xr.open_mfdataset(
                cf,
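                # The nj/ni chunk sizes below span the full 304 x 448 grid
                # (cf. Example #3), so each spatial reduction stays in one chunk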
                chunks={
                    'fore_time': 10,
                    'ensemble': 5,
                    'init_time': 10,
                    'nj': 304,
                    'ni': 448
                },
                parallel=True
            )  # Works but is not efficient (5-15 min wall time)
            ds = ds.rename({'nj': 'x', 'ni': 'y'})  # rename() no longer supports inplace=True

            # Calc panArctic extent
            da_panE = metrics.calc_extent(da=ds.sic, region=ds_region)
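            # Tag the pan-Arctic series with its own region id and name so it
            # can be concatenated with the regional extents along 'nregions'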
            da_panE['nregions'] = 99
            da_panE['region_names'] = 'panArctic'

            # Calc Regional extents
            da_RegE = metrics.agg_by_domain(da_grid=ds.sic,
                                            ds_region=ds_region)

            # Merge
            ds_out = xr.concat([da_panE, da_RegE], dim='nregions')
            ds_out.name = 'Extent'

            ds_out.load()  # Loading here prevents many errors in the dask graph (cause unknown)

            # Save aggregated extents to netcdf file
            ds_out.to_netcdf(f_out)
            print('Saved ', f_out)

            # Memory clean up
            ds_out = None
            da_panE = None
            da_RegE = None
            ds = None

        print("Finished...")
Example #3
        # Add lat and lon dimensions
        ds_sic.coords['lat'] = ds_lat_lon.lat
        ds_sic.coords['lon'] = ds_lat_lon.lon

        # Stereo projected units (m)
        dx = dy = 25000
        xm = np.arange(-3850000, +3750000, +dx)
        ym = np.arange(+5850000, -5350000, -dy)
        ds_sic.coords['xm'] = xr.DataArray(xm, dims=('x'))
        ds_sic.coords['ym'] = xr.DataArray(ym, dims=('y'))
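
        # These ranges yield 304 x-points and 448 y-points, i.e. the 25 km
        # NSIDC north polar stereographic grid (matching the nj=304, ni=448
        # chunk sizes in Example #2)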

        # Calculate extent and area
        #         ds_sic['extent'] = ((ds_sic.sic>=0.15).astype('int') * ds_region.area).sum(dim='x').sum(dim='y')/(10**6)
        ds_sic['extent'] = metrics.calc_extent(ds_sic.sic,
                                               ds_region,
                                               fill_pole_hole=True)
        #         ds_sic['extent'] = ds_sic['extent'] + (ds_sic.hole_mask.astype('int') * ds_region.area).sum(dim='x').sum(dim='y')/(10**6) # Add hole
        ds_sic['area'] = (ds_sic.sic * ds_region.area).sum(dim='x').sum(
            dim='y') / (10**6)  # No pole hole
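        # Extent and area come out in 10^6 km^2, assuming ds_region.area is
        # given in km^2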

        # Save to netcdf file
        ds_sic.to_netcdf(os.path.join(nc_dir, nf.split('.b')[0] + '.nc'))
        ds_sic = None

#     # Calculate extent and area (saved to separate file)
#     if len(new_files) > 0 : # There were some new files
#         print('Calculating extent and area...')
#         ds_all = xr.open_mfdataset(os.path.join(nc_dir,'*.nc'), concat_dim='time',
#                                    autoclose=True, compat='no_conflicts',
#                                    data_vars=['sic'])