import pandas as pd

from oasis_utils import read_csvtodf

# Parameters ------------------------------------------------------------------
vulnId = 1
summaryset_id = 1  # loss per location
ifile_vuln = "static/vulnerability.csv"
ifile_dmgmap = "static/damage_bin_dict.csv"
pd.options.display.max_rows = 10

# Script ----------------------------------------------------------------------
# Read the vulnerability file
vuln0 = read_csvtodf(ifile_vuln)

# Extract the vulnerability we're interested in
vuln0 = vuln0.loc[vuln0.vulnerability_id == vulnId]
vuln0 = vuln0.drop(columns='vulnerability_id')

# Set the index
vuln0 = vuln0.sort_values(['intensity_bin_id', 'damage_bin_id'],
                          ascending=[True, True])
vuln0 = vuln0.set_index(['intensity_bin_id', 'damage_bin_id'])

# Calculate cumulative and exceedance prob
vuln1 = vuln0.groupby('intensity_bin_id').cumsum().rename(
    columns={'prob': 'cprob'})
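# The listing stops at the cumulative probability. A minimal sketch of the
# exceedance part, assuming the usual relation excprob = 1 - cprob + prob
# (probability of reaching or exceeding each damage bin); the column name
# 'excprob' is illustrative and not from the original script.
vuln1['excprob'] = 1.0 - vuln1['cprob'] + vuln0['prob']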
    # Show the plot
    print('Pausing while plot is shown...')
    pylab.show(block=True)

    return 1


# Script ----------------------------------------------------------------------
# Read the footprint for the eventid we want
fp = read_footprint(ifile_fp, eventId)

# Read the intensity bins and join
fp = pd.merge(fp, read_intensbins(ifile_intensbins),
              left_index=True, right_index=True)

# Get the exceedance probability for the intensity we want
fp = fp[fp.right >= intensval]['prob'].groupby(level=0).sum()

# Read location file
locs = read_csvtodf(ifile_loc)
locs.set_index('coverage_id', drop=True, inplace=True)
locs.index.names = ['locid']

# Join the footprint by areaperil id
locs = pd.merge(locs, fp.to_frame(), left_on='areaperil_id', right_index=True)

# Plot
plot_footprintmap(locs)
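# A self-contained toy illustrating the exceedance step above: for each
# areaperil, sum the probabilities of the intensity bins whose upper edge
# ('right') reaches the chosen intensity. The data below is made up purely
# to show the pandas pattern; the real columns come from read_footprint and
# read_intensbins.
import pandas as pd

toy = pd.DataFrame(
    {'right': [20, 40, 60, 20, 40, 60],
     'prob':  [0.5, 0.3, 0.2, 0.7, 0.2, 0.1]},
    index=pd.Index([1, 1, 1, 2, 2, 2], name='areaperil_id'))
excprob = toy[toy.right >= 40]['prob'].groupby(level=0).sum()
print(excprob)  # areaperil 1 -> 0.5, areaperil 2 -> 0.3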
    # Formatting
    ax.set_aspect('equal')
    ax.autoscale(tight=True)
    ax.grid()

    # Add a color bar
    plt.colorbar(cax, orientation='vertical', label='Loss')

    # Add a title
    plt.title("Expected GU loss by location, Type=%s" % losstype)

    return 1


# Read location file
locs = read_csvtodf(ifile_loc)

# Read the location losses from the ELT file
loclosses0 = read_locationlosses(ifile_elt)

# Read the summary file to get the coverage id
summ = read_csvtodf(ifile_summ)

# Keep the location-level losses
summ = summ.loc[summ.summaryset_id == summsetId]

# Get the coverage id per loss
loclosses = pd.merge(loclosses0, summ, left_index=True, right_on='summary_id')

# Get the location detail per loss
loclosses = pd.merge(loclosses,
""" Plot the output """ import pandas as pd from matplotlib import pyplot as plt import seaborn as sns from oasis_utils import read_csvtodf # Parameters ifile_getmodel = "./results/loccdf.csv" pd.options.display.max_rows = 20 vulnId = 1 eventId = 1 # Read the output file gm = read_csvtodf(ifile_getmodel) gm = gm.loc[(gm.vulnerability_id == vulnId) & (gm.event_id == eventId)] gm.drop(['vulnerability_id', 'event_id'], 1, inplace=True) #gm3 = gm.groupby(["areaperil_id", "bin_mean"])['prob_to'].prod() gm3 = gm.groupby(["areaperil_id", "bin_mean"])['prob_to'].max() gm2 = gm3.to_frame().reset_index().pivot('areaperil_id', 'bin_mean', 'prob_to') # Plot f, ax2 = plt.subplots(figsize=(9, 6)) sns.heatmap(gm2, ax=ax2, cmap='Spectral') plt.show()
ifile_item = "input/items.csv" # Need the footprint to get the intensity per location ifile_fp = "static/footprint.csv" ifile_intensbins = "static/intensity_bin_dict.csv" pd.options.display.max_rows = 10 # Script ---------------------------------------------------------------------- # Read the location losses from the ELT file loclosses = read_locationlosses(ifile_elt_perloc) # Read the summary file to get the coverageid summ = read_csvtodf(ifile_summary) summ = summ.loc[summ.summaryset_id == summaryset_id] summ.drop('summaryset_id', 1, inplace=True) # Get the coverage id per loss loclosses = pd.merge(loclosses, summ, left_index=True, right_on='summary_id') # Join with the items so we can get the areaperil loclosses = pd.merge(loclosses, read_items(ifile_item), on="coverage_id") # Join with the coverages so we can get the TIV loclosses = pd.merge(loclosses, read_coverages(ifile_cvg), left_on="coverage_id", right_index=True)