# NB. Derive the probability of each category from the predictions cube
probs = getprobs(predscube.data)

# Find the loss associated with each action,
# i.e. a matrix multiplication -> in 2D this is LOSSFUN (4x4) * probs (1x4) -> loss per action (1x4)
# In Python: np.matmul(LOSSFUN.T, probs) OR (LOSSFUN.T * probs).sum(axis=1) OR np.einsum('ij,i->j', LOSSFUN, probs)
# To broadcast in 3D over the four probability levels, i.e. (4, lon, lat), np.einsum() is needed;
# see https://stackoverflow.com/questions/26089893/understanding-numpys-einsum
# NB. It is very important to get the dimensions the right way round.
actionloss = np.einsum('ij,ikl->jkl', LOSSFUN, probs)
# Then find the action with the lowest loss (i.e. along axis=0) with np.argmin(),
# i.e. which action minimises the loss.
warn = np.argmin(actionloss, axis=0)
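# A minimal worked sketch of the two steps above, with made-up shapes; the
# underscore-prefixed names are illustrative assumptions, not the real inputs:
# _lossfun = np.arange(16.0).reshape(4, 4)                   # _lossfun[i, j]: loss of action j if category i occurs
# _probs = np.full((4, 3, 5), 0.25)                           # four category probabilities on a 3x5 grid
# _actionloss = np.einsum('ij,ikl->jkl', _lossfun, _probs)    # shape (4, 3, 5): expected loss per action per cell
# _warn = np.argmin(_actionloss, axis=0)                      # shape (3, 5): index of the cheapest action at each cell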

# Load BGD outline
bgd = shape.load_shp('sup/BGD_shape.shp')
bgd_shape = bgd.unary_union()

# Mask data
# Set up a dummy cube to hold the warn data
warncube = predscube[:, :, 0]
warncube.data = warn
warncube.rename('warnings')
warncube = bgd_shape.mask_cube(warncube)

# Load populated places
pop = pd.read_csv('sup/BGD_places.txt')

# Load BGD Level 1 admin regions
admin1 = shpreader.Reader('sup/BGD_admin1_lines_internal.shp')
import cartopy.io.shapereader as shpreader  # needed for shpreader.Reader() below
import iris
import iris.plot as iplt
import matplotlib.patheffects as path_effects
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from ascend import shape
from cartopy.mpl.geoaxes import GeoAxes
from matplotlib.colors import BoundaryNorm
from matplotlib.transforms import offset_copy
from mpl_toolkits.axes_grid1 import AxesGrid
from plotting import scale_bar
from user_vars import HCNC

# Load BGD outline
bgd = shape.load_shp('BGD_shape.shp')
bgd_shape = bgd.unary_union()

# Load populated places
pop = pd.read_csv('BGD_places.txt')

# Load BGD Level 1 admin regions
admin1 = shpreader.Reader('BGD_admin1_lines_internal.shp')

# Load model data
gfp = iris.load_cube(HCNC + '/fpgrand.fg.T1Hmax.4p4km.nc',
                     'posterior credible intervals of wind_speed_of_gust')

# Take the last two slices (the q95/q99 names suggest the 95th and 99th percentiles)
q95 = gfp[:, :, -2]
q99 = gfp[:, :, -1]
    # Save the comparison plot for this storm
    plt.savefig(
        f'/project/ciid/projects/IKI/historical_catalogue/plots/compare{dfrow.NAME}+wind.png',
        dpi=600)
    plt.close()
    print(f'Done {dfrow.NAME}!')


# Pandas to matplotlib datetime conversion handling etc.
register_matplotlib_converters()

RES = '4p4'  # '4p4' or '1p5'
DAYS = 1  # Days pre downscale data to plot

IBTRACSFILE = '/project/ciid/projects/IKI/obs_datasets/global/IBTrACS/v04r00/ibtracs.NI.list.v04r00.csv'

# Load Validation area shapefile
val = shape.load_shp('/project/ciid/projects/IKI/ArcGIS/ValidationArea2.shp')
val_shape = val.unary_union()

# # Dask Parallel processing
# # Determine the number of processors visible...
# cpu_count = multiprocessing.cpu_count()
# # .. or as given by slurm allocation.
# if 'SLURM_NTASKS' in os.environ:
#     cpu_count = int(os.environ['SLURM_NTASKS'])
# # Do not exceed the number of CPUs available, leaving 1 for the system.
# num_workers = cpu_count - 1
# print('Using {} workers from {} CPUs...'.format(num_workers, cpu_count))

with dask.config.set(num_workers=13):
    dask_bag = db.from_sequence(
        [row for index, row in EVENTS.iterrows()]).map(process_storm)
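# NB. The bag above is lazy; nothing runs until it is computed. A minimal sketch of
# triggering the work (this assumes `import dask` and `import dask.bag as db` earlier
# in the script; the scheduler choice is an assumption, not from the source):
# results = dask_bag.compute(scheduler='processes')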
# Example 4
    
    # Return the difference dataframes
    print(f'Done {dfrow.NAME}!')
    return pd.concat([usdfw, usdfp, nddfw, nddfp, era5dfg, era5dfw, era5dfp, ra2dfg, ra2dfw, ra2dfp], ignore_index=True)


# Pandas to matplotlib datetime conversion handling etc.
register_matplotlib_converters()

RES = '4p4'  # '4p4' or '1p5'
DAYS = 1  # Days pre downscale data to plot

IBTRACSFILE = 'sup/ibtracs.NI.list.v04r00.csv'

# Load Validation area shapefile
val = shape.load_shp('sup/ValidationArea2.shp')
val_shape = val.unary_union()

# Start csv file for recording differences
DIFFCSV = 'validation_diff2.csv'

# # Dask Parallel processing
# # Determine the number of processors visible...
# cpu_count = multiprocessing.cpu_count()
# # .. or as given by slurm allocation.
# if 'SLURM_NTASKS' in os.environ:
#     cpu_count = int(os.environ['SLURM_NTASKS'])
# # Do not exceed the number of CPUs available, leaving 1 for the system.
# num_workers = cpu_count - 1
# print('Using {} workers from {} CPUs...'.format(num_workers, cpu_count))
import cartopy.io.shapereader as shpreader
import numpy as np
import pandas as pd
import rasterio
from ascend import shape
from matplotlib.colors import BoundaryNorm
from mpl_toolkits.axes_grid1 import AxesGrid
from cartopy.mpl.geoaxes import GeoAxes
import matplotlib.patheffects as path_effects
from plotting import scale_bar
import pdb

IBTRACSFILE = 'sup/ibtracs.NI.list.v04r00.csv'
INDIR = 'storm_tracks/'

EVENTS = pd.read_csv('sup/events.csv', header=4)

# Load BGD outline
bgd = shape.load_shp('sup/NorthBoB_coast.shp')
bgd_shape = bgd.unary_union()

# Load populated places
pop = pd.read_csv('sup/BGD_places.txt')

# Load BGD Level 1 admin regions and WDBII international border lines
admin1 = shpreader.Reader('sup/BGD_admin1_lines_internal.shp')
admin2 = shpreader.Reader('sup/WDBII_border_i_L1.shp')

# Load 4.4km geotiff to get rasterio dimensions
raster = rasterio.open('sup/4p4domain_30p0.tif')
# Get lon-lat coordinates - a quirk of raster.xy() means the x and y arrays have to be fetched separately
llon, _ = raster.xy(np.zeros(raster.width), range(raster.width))
_, llat = raster.xy(range(raster.height), np.zeros(raster.height))
lons, lats = np.meshgrid(llon, llat)
# Example 6
import ascend
from ascend import shape

########## set up names of directories and other variables ################################

# local directory to save data to
data_dir = '/data/users/sburgan/CP4A/processed_data/Tanzania/'

# select a small region e.g. southern Zambia region
#lat1 = 11.89
#lat2 = 358.677
#lon1 = 0.80
#lon2 = 401.05

# Load shapefile for Tanzania
natural_earth = ascend.EXAMPLE_NATURAL_EARTH
country = shape.load_shp(natural_earth, name='Tanzania')[0]
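# A minimal sketch of applying the Tanzania shape to a cube, mirroring the mask_cube
# usage earlier in this file; the file name below is an illustrative assumption, and
# iris is assumed to be imported elsewhere in the script:
# tz_cube = iris.load_cube(data_dir + 'example_cp4a_field.nc')
# tz_cube = country.mask_cube(tz_cube)  # mask out points outside the Tanzania outline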

#name of the model run
#Allow for runid to be used as a command line / bash argument

#parser = argparse.ArgumentParser()
#parser.add_argument('stashid')
#args = parser.parse_args()
#stash = args.stashid

#stash='ac144'
region = 'Tanzania'

#################   set-up functions    #####################