예제 #1
0
                ],
            ))
        pf[scenario + '_' + target] = pf[keys].mean(axis=1)

# Regrid the point predictions in `pf` onto the NLCD forest mask and write
# the resulting dataset to the web results bucket.
print(f'[{dataset}] regridding predictions')

# NLCD forest classes: 41 deciduous, 42 evergreen, 43 mixed, 90 woody wetlands
nlcd = load.nlcd(store=store, year=2016, classes=[41, 42, 43, 90])
final_mask = nlcd.sum('band')
final_mask.attrs['crs'] = nlcd.attrs['crs']

# biomass keeps its magnitude where forest fraction exceeds 0.5;
# all other datasets collapse to a boolean mask
if 'biomass' in dataset:
    final_mask.values = final_mask.values * (final_mask.values > 0.5)
else:
    final_mask.values = final_mask.values > 0.5

ds['historical'] = fit.interp(pf, final_mask, var='historical')

# one gridded layer per (scenario, target), stacked along a 'year' dimension
for scenario in tqdm(scenarios):
    layers = [
        fit.interp(pf, final_mask, var=f'{scenario}_{target}')
        for target in targets
    ]
    ds[scenario] = xr.concat(layers, dim=xr.Variable('year', targets))

account_key = os.environ.get('BLOB_ACCOUNT_KEY')
path = utils.get_store('carbonplan-forests',
                       f'risks/results/web/{dataset}.zarr',
                       account_key=account_key)
ds.to_zarr(path, mode='w')
예제 #2
0
results_dict = {}
# select out bounding boxes
for impact in ['insects', 'drought', 'fire', 'tmean']:
    results_dict[impact] = {}
    # read in the temperature data from its different sources and create a datacube
    # of the same specs as the risks
    if impact == 'tmean':
        ds = build_climate_cube()
    # grab the risks data
    else:
        store_path = 'risks/results/web/{}_full.zarr'.format(impact)
        ds = xr.open_zarr(
            get_store(
                'carbonplan-forests',
                store_path,
                account_key=account_key,
            )
        )
        ds = ds.assign_coords({'year': np.arange(1980, 2100, 10)})

    if impact in ['insects', 'drought']:
        # restructure the insects/drought ones to align with the temp/fire
        ds = repackage_drought_insects(ds)

    # assign the coords for all of the data sources (this helps make sure that
    # the masking works appropriately and coordinates aren't off by 0.00000001)
    print(ds)
    ds = ds.assign_coords(
        {
            "x": website_mask.x,
예제 #3
0
# Parse the CLI: args[1] is the dataset name; optional args[2] is an integer
# coarsening factor applied on top of the base 4000-unit grid spacing.
dataset = args[1]

has_coarsen_arg = len(args) > 2
coarsen = int(args[2]) if has_coarsen_arg else 0
savename = f'{dataset}_d{coarsen}' if has_coarsen_arg else dataset
res = 4000 * coarsen if has_coarsen_arg else 4000

print(f'[{dataset}] converting to geojson')

# decimal precision used downstream when serializing values
precision = 2

store = utils.get_store('carbonplan-forests',
                        f'risks/results/web/{dataset}.zarr')
ds = xr.open_zarr(store)

# average blocks of coarsen x coarsen cells, trimming any partial edge blocks
if coarsen > 0:
    ds = ds.coarsen(x=coarsen, y=coarsen, boundary='trim').mean().compute()
# if dataset == 'fire':
#     scenarios = ['ssp245', 'ssp370', 'ssp585']
#     for scenario in scenarios:
#         keys = list(
#             filter(lambda a: a is not None, [k if scenario in k else None for k in ds.data_vars])
#         )
#         ds[scenario] = functools.reduce(lambda a, b: a + b, [ds[key] for key in keys]) / len(keys)

if 'fire' in dataset or 'biomass' in dataset:
    scenarios = ['ssp245', 'ssp370', 'ssp585']
예제 #4
0
# Build the historical drought layer for the web map: read the modeled
# FIA-long mortality points, grid them onto the NLCD forest mask, and
# write the result to the web results bucket.
store = 'az'

df = pd.read_csv(
    'https://carbonplan.blob.core.windows.net/carbonplan-scratch/from-bill-04-14-2021/Fig1D_DroughtModel_ModeledFIAlongEnsembleHistMort_FIAlong_04-14-2021.csv'
)

# V3/V2/V6 hold lat/lon/mortality in the source CSV; drop incomplete rows
pf = pd.DataFrame({
    'lat': df['V3'],
    'lon': df['V2'],
    'mortality': df['V6'],
}).dropna().reset_index(drop=True)

ds = xr.Dataset()

# NLCD forest classes: 41 deciduous, 42 evergreen, 43 mixed, 90 woody wetlands
nlcd = load.nlcd(store=store, year=2016, classes=[41, 42, 43, 90])
final_mask = nlcd.sum('band')
final_mask.attrs['crs'] = nlcd.attrs['crs']
final_mask.values = final_mask.values > 0.5

ds['historical'] = fit.interp(pf, final_mask, var='mortality')

account_key = os.environ.get('BLOB_ACCOUNT_KEY')
path = utils.get_store('carbonplan-forests',
                       'risks/results/web/drought.zarr',
                       account_key=account_key)
ds.to_zarr(path, mode='w')
예제 #5
0
# Website mask: cells whose summed NLCD 2016 forest cover exceeds 0.5.
# Forest classes: 41 deciduous, 42 evergreen, 43 mixed, 90 woody wetlands.
forest_cover = load.nlcd(store="az", year=2016).sel(band=[41, 42, 43, 90]).sum("band")
website_mask = (forest_cover > 0.5).astype("float")

# (GCM, ensemble member) pairs used for the CMIP projections
gcms = [
    ("ACCESS-CM2", "r1i1p1f1"),
    ("ACCESS-ESM1-5", "r10i1p1f1"),
    ("MRI-ESM2-0", "r1i1p1f1"),
    ("MIROC-ES2L", "r1i1p1f2"),
    ("MPI-ESM1-2-LR", "r10i1p1f1"),
    ("CanESM5-CanOE", "r3i1p2f1"),
]

for impact in impacts_to_process:
    ds = xr.open_zarr(
        get_store(
            'carbonplan-forests',
            'risks/results/web/{}_cmip_high_res.zarr'.format(impact),
            account_key=account_key,
        ))
    ds = ds.assign_coords({
        "x": website_mask.x,
        "y": website_mask.y,
    })
    if impact == 'fire':
        ds = ds.groupby('time.year').sum().coarsen(year=10).mean().compute()
        ds = ds.assign_coords({'year': np.arange(1970, 2100, 10)})
        ds = ds.rolling(year=2).mean().drop_sel(year=1970)
    else:
        ds = ds.assign_coords({'year': np.arange(1970, 2100, 10)})
        ds = ds.rolling(year=2).mean().drop_sel(year=1970)

    ds = ds.assign_coords(
예제 #6
0
    yhat = model.predict(x_z)
    prediction = collect.fire(yhat, climate.sel(time=analysis_time_slice))
    ds['historical'] = (['time', 'y', 'x'], prediction['prediction'])
    ds = ds.assign_coords({
        'x': climate.x,
        'y': climate.y,
        'time': climate.sel(time=analysis_time_slice).time,
        'lat': climate.lat,
        'lon': climate.lon,
    })
    if store == 'local':
        ds.to_zarr('data/fire_historical.zarr', mode='w')
    elif store == 'az':
        path = get_store(
            'carbonplan-forests',
            'risks/results/paper/fire_terraclimate_{}.zarr'.format(run_name),
            account_key=account_key,
        )
        if year == 1984:
            ds.to_zarr(path, consolidated=True, mode='w')
        else:
            ds.to_zarr(path, consolidated=True, mode='a', append_dim='time')
print('[fire] evaluating on future climate')
# (GCM, ensemble member) pairs whose future climate the fire model is
# evaluated against below
cmip_models = [
    ('CanESM5-CanOE', 'r3i1p2f1'),
    ('MIROC-ES2L', 'r1i1p1f2'),
    ('ACCESS-CM2', 'r1i1p1f1'),
    ('ACCESS-ESM1-5', 'r10i1p1f1'),
    ('MRI-ESM2-0', 'r1i1p1f1'),
    ('MPI-ESM1-2-LR', 'r10i1p1f1'),
]
from carbonplan_forest_risks import load
from carbonplan_forest_risks.utils import get_store

# flake8: noqa

account_key = os.environ.get('BLOB_ACCOUNT_KEY')

# grid_template only supplies the x/y coordinate template for the
# insect/drought tifs; no masking is applied at this stage (mask=None) —
# a mask is applied later during the webmap data prep.
nlcd_forest = load.nlcd(store="az", year=2016).sel(band=[41, 42, 43, 90])
grid_template = (nlcd_forest.sum("band") > 0.5).astype("float")

cmip_insect_url_template = "https://carbonplan.blob.core.windows.net/carbonplan-scratch/from-bill-05-03-2021/InsectProjections_Maps_5-5-21/InsectModelProjection_{}.{}.{}-{}.{}-v18climate_05-05-2021.tif"

# scale to percent, label the source dimension as 'gcm', write to the bucket
da = load.impacts(cmip_insect_url_template, grid_template, mask=None) * 100
out_path = get_store('carbonplan-forests',
                     'risks/results/paper/insects_cmip_v5.zarr')
ds = xr.Dataset({'probability': da.to_array(dim='vars').rename({'vars': 'gcm'})})
ds.to_zarr(out_path, mode='w', consolidated=True)

cmip_drought_url_template = "https://carbonplan.blob.core.windows.net/carbonplan-scratch/from-bill-05-03-2021/DroughtProjections_Maps_5-5-21/DroughtModelProjection_{}.{}.{}-{}.{}-v18climate_05-05-2021.tif"

# same treatment as the insect projections above: scale to percent,
# label the source dimension as 'gcm', and write to the paper bucket
out_path = get_store('carbonplan-forests',
                     'risks/results/paper/drought_cmip_v5.zarr')
da = load.impacts(cmip_drought_url_template, grid_template, mask=None) * 100
ds = xr.Dataset({'probability': da.to_array(dim='vars').rename({'vars': 'gcm'})})
ds.to_zarr(out_path, mode='w', consolidated=True)

# load in historical runs to create drought_terraclimate and insects_terraclimate
terraclimate_insect_url_template = "https://carbonplan.blob.core.windows.net/carbonplan-scratch/from-bill-05-03-2021/Fig2_TerraClimateHistModels_4-22-21/InsectModel_ModeledTerraClimateFIAlong_{}-{}_04-22-2021.tif"