def __call__(self, timestep=1, **kwargs):
    """Run the dike-network flood simulation and return outcomes of interest.

    Parameters
    ----------
    timestep : int, optional
        Step used when building the time axis of the discharge wave
        (passed to ``np.arange``); defaults to 1.
    **kwargs
        Uncertainties and levers to load into the network graph. Keys are
        either ``'discount rate ...'`` entries, ``'<projectID>_RfR <step>'``
        room-for-the-river switches (value 0 or 1), or ``'<dike>_<name>'`` /
        ``'EWS_<name>'`` attribute assignments.

    Returns
    -------
    dict
        Per planning step ``s`` and per dike: discounted Expected Annual
        Damage, Expected Number of Deaths, Dike Investment Costs; plus
        network-wide RfR Total Costs and Expected Evacuation Costs.

    NOTE(review): mutates ``self.G`` in place (node attributes such as
    'rnew', 'Qin', 'Qout', 'status', per-step loss lists); relies on project
    helpers ``Muskingum``, ``Lookuplin``, ``dikefailure``, ``discount`` and
    ``cost_evacuation`` whose contracts are not visible here.
    """
    G = self.G
    Qpeaks = self.Qpeaks
    dikelist = self.dikelist

    # Call RfR initialization:
    self._initialize_rfr_ooi(G, dikelist, self.planning_steps)

    # Load all kwargs into network. Kwargs are uncertainties and levers:
    for item in kwargs:
        # when item is 'discount rate':
        if 'discount rate' in item:
            G.nodes[item]['value'] = kwargs[item]
        # the rest of the times you always get a string like {}_{}:
        else:
            string1, string2 = item.split('_')

            if 'RfR' in string2:
                # string1: projectID
                # string2: rfr #step
                # Note: kwargs[item] in this case can be either 0
                # (no project) or 1 (yes project)
                temporal_step = string2.split(' ')[1]
                proj_node = G.nodes['RfR_projects {}'.format(temporal_step)]

                # Cost of RfR project (costs_1e6 is in millions, hence * 1e6)
                proj_node['cost'] += kwargs[item] * proj_node[string1][
                    'costs_1e6'] * 1e6

                # Iterate over the location affected by the project
                for key in proj_node[string1].keys():
                    if key != 'costs_1e6':
                        # Change in rating curve due to the RfR project:
                        # lowers the water level column of 'rnew' in place
                        G.nodes[key]['rnew'][:, 1] -= kwargs[item] * proj_node[
                            string1][key]
            else:
                # string1: dikename or EWS
                # string2: name of uncertainty or lever
                G.nodes[string1][string2] = kwargs[item]

    self.progressive_height_and_costs(G, dikelist, self.planning_steps)

    # Percentage of people who can be evacuated for a given warning
    # time:
    G.nodes['EWS']['evacuation_percentage'] = G.nodes['EWS']['evacuees'][
        G.nodes['EWS']['DaysToThreat']]

    # Dictionary storing outputs:
    data = {}

    for s in self.planning_steps:
        for Qpeak in Qpeaks:
            node = G.nodes['A.0']
            waveshape_id = node['ID flood wave shape']

            # here something sometimes goes wrong (translated from Dutch:
            # "hier gaat soms iets fout") — TODO investigate; presumably
            # waveshape_id can index outside 'Qevents_shape'
            time = np.arange(0,
                             node['Qevents_shape'].loc[waveshape_id].shape[0],
                             timestep)
            node['Qout'] = Qpeak * node['Qevents_shape'].loc[waveshape_id]

            # Initialize hydrological event:
            for key in dikelist:
                node = G.nodes[key]

                Q_0 = int(G.nodes['A.0']['Qout'][0])
                self._initialize_hydroloads(node, time, Q_0)
                # Calculate critical water level: water above which failure
                # occurs
                node['critWL'] = Lookuplin(
                    node['fnew {}'.format(s)], 1, 0, node['pfail'])

            # Run the simulation:
            # Run over the discharge wave:
            for t in range(1, len(time)):
                # Run over each node of the branch:
                for n in range(0, len(dikelist)):
                    # Select current node:
                    node = G.nodes[dikelist[n]]
                    if node['type'] == 'dike':
                        # Muskingum parameters:
                        C1 = node['C1']
                        C2 = node['C2']
                        C3 = node['C3']

                        prec_node = G.nodes[node['prec_node']]
                        # Evaluate Q coming in a given node at time t:
                        # Muskingum routing from the preceding node's outflow
                        node['Qin'][t] = Muskingum(C1, C2, C3,
                                                   prec_node['Qout'][t],
                                                   prec_node['Qout'][t - 1],
                                                   node['Qin'][t - 1])

                        # Transform Q in water levels:
                        node['wl'][t] = Lookuplin(
                            node['rnew'], 0, 1, node['Qin'][t])

                        # Evaluate failure and, in case, Q in the floodplain and
                        # Q left in the river:
                        res = dikefailure(self.sb,
                                          node['Qin'][t], node['wl'][t],
                                          node['hbas'][t], node['hground'],
                                          node['status'][t - 1], node['Bmax'],
                                          node['Brate'], time[t],
                                          node['tbreach'], node['critWL'])

                        node['Qout'][t] = res[0]
                        node['Qpol'][t] = res[1]
                        node['status'][t] = res[2]
                        node['tbreach'] = res[3]

                        # Evaluate the volume inside the floodplain as the
                        # integral of Q in time up to time t.
                        # NOTE(review): integrates the full 'Qpol' array every
                        # timestep (O(T^2) overall) — confirm intended
                        node['cumVol'][t] = np.trapz(
                            node['Qpol']) * self.timestepcorr

                        Area = Lookuplin(node['table'], 4, 0, node['wl'][t])
                        node['hbas'][t] = node['cumVol'][t] / float(Area)

                    elif node['type'] == 'downstream':
                        # Downstream node simply receives the upstream outflow
                        node['Qin'] = G.nodes[dikelist[n - 1]]['Qout']

            # Iterate over the network and store outcomes of interest for a
            # given event
            for dike in self.dikelist:
                node = G.nodes[dike]

                # If breaches occured:
                if node['status'][-1] == True:
                    # Losses per event:
                    node['losses {}'.format(s)].append(
                        Lookuplin(node['table'], 6, 4, np.max(node['wl'])))
                    # Deaths scaled by the share of people NOT evacuated:
                    node['deaths {}'.format(s)].append(
                        Lookuplin(node['table'], 6, 3, np.max(node['wl'])) * (
                            1 - G.nodes['EWS']['evacuation_percentage']))
                    node['evacuation_costs {}'.format(s)].append(
                        cost_evacuation(Lookuplin(
                            node['table'], 6, 5, np.max(node['wl'])
                        ) * G.nodes['EWS']['evacuation_percentage'],
                            G.nodes['EWS']['DaysToThreat']))
                else:
                    node['losses {}'.format(s)].append(0)
                    node['deaths {}'.format(s)].append(0)
                    node['evacuation_costs {}'.format(s)].append(0)

        EECosts = []
        # Iterate over the network, compute and store ooi over all events
        for dike in dikelist:
            node = G.nodes[dike]

            # Expected Annual Damage: integrate losses over exceedance
            # probabilities
            EAD = np.trapz(node['losses {}'.format(s)], self.p_exc)
            # Discounted annual risk per dike ring:
            disc_EAD = np.sum(discount(EAD,
                                       rate=G.nodes[
                                           'discount rate {}'.format(s)][
                                           'value'],
                                       n=self.y_step))
            # Expected Annual number of deaths:
            END = np.trapz(node['deaths {}'.format(s)], self.p_exc)

            # Expected Evacuation costs: depend on the event, the higher
            # the event, the more people you have got to evacuate:
            EECosts.append(np.trapz(node['evacuation_costs {}'.format(s)],
                                    self.p_exc))

            data.update({'{}_Expected Annual Damage {}'.format(dike, s):
                             disc_EAD,
                         '{}_Expected Number of Deaths {}'.format(dike, s):
                             END,
                         '{}_Dike Investment Costs {}'.format(dike, s):
                             node['dikecosts {}'.format(s)]})

        # NOTE(review): 'cost'.format(s) is a no-op — the key is just 'cost'
        data.update({'RfR Total Costs {}'.format(s): G.nodes[
            'RfR_projects {}'.format(s)]['cost'.format(s)]})

        data.update({'Expected Evacuation Costs {}'.format(s):
                         np.sum(EECosts)})

    return data
def get_network(plann_steps_max=10):
    """Build the dike network graph and load its crucial parameters.

    Reads dike, fragility-curve, room-for-the-river, evacuation and
    Muskingum data from the ``./data`` folder and assembles them into a
    :class:`networkx.MultiDiGraph` whose nodes carry the model attributes.

    Parameters
    ----------
    plann_steps_max : int, optional
        Number of planning steps for which RfR-project and discount-rate
        nodes are created (default 10).

    Returns
    -------
    tuple
        ``(G, dike_list, dike_branches, steps)`` — the populated graph, the
        index values of dike-type nodes, a mapping branch -> node names, and
        the array of planning-step indices.

    Fix: replaced every deprecated ``G.node[...]`` accessor with
    ``G.nodes[...]`` — ``Graph.node`` was removed in networkx 2.4, and the
    rest of this file (``__call__``) already uses ``G.nodes``.
    """
    # Upload dike info
    df = pd.read_excel('./data/dikeIjssel.xlsx', dtype=object)
    df = df.set_index('NodeName')
    nodes = df.to_dict('index')

    # Create network out of dike info
    G = nx.MultiDiGraph()
    for key, attr in nodes.items():
        G.add_node(key, **attr)

    # Select dike type nodes
    branches = df['branch'].dropna().unique()
    dike_list = df['type'][df['type'] == 'dike'].index.values
    dike_branches = {k: df[df['branch'] == k].index.values
                     for k in branches}

    # Upload fragility curves:
    frag_curves = pd.read_excel('./data/fragcurves/frag_curves.xlsx',
                                header=None, index_col=0).transpose()
    calibration_factors = pd.read_excel(
        './data/fragcurves/calfactors_pf1250.xlsx')

    # Upload room for the river projects: one RfR-projects node and one
    # discount-rate node per planning step
    steps = np.array(range(plann_steps_max))
    for n in steps:
        G.add_node('RfR_projects {}'.format(n), **to_dict_dropna(
            pd.read_excel('./data/rfr_strategies.xlsx',
                          index_col=0, names=range(5))))
        G.nodes['RfR_projects {}'.format(n)]['type'] = 'measure'

        G.add_node('discount rate {}'.format(n), **{'value': 0})

    # Upload evacuation policies:
    G.add_node('EWS', **pd.read_excel('./data/EWS.xlsx').to_dict())
    G.nodes['EWS']['type'] = 'measure'

    # Upload muskingum params:
    Muskingum_params = pd.read_excel('./data/Muskingum/params.xlsx')

    # Fill network with crucial info:
    for dike in dike_list:
        # Assign fragility curves, assuming it's the same shape for every
        # location
        dikeid = 50001010
        G.nodes[dike]['f'] = np.column_stack(
            (frag_curves.loc[:, 'wl'].values,
             frag_curves.loc[:, dikeid].values))
        # Adjust fragility curves
        G.nodes[dike]['f'][:, 0] += calibration_factors.loc[dike].values

        # Determine the level of the dike
        G.nodes[dike]['dikelevel'] = Lookuplin(G.nodes[dike]['f'], 1, 0, 0.5)

        # Assign stage-discharge relationships
        filename = './data/rating_curves/{}_ratingcurve_new.txt'.format(dike)
        G.nodes[dike]['r'] = np.loadtxt(filename)

        # Assign losses per location:
        name = './data/losses_tables/{}_lossestable.xlsx'.format(dike)
        G.nodes[dike]['table'] = pd.read_excel(name).values

        # Assign Muskingum parameters (routing coefficients keyed by the
        # preceding node):
        G.nodes[dike]['C1'] = Muskingum_params.loc[
            G.nodes[dike]['prec_node'], 'C1']
        G.nodes[dike]['C2'] = Muskingum_params.loc[
            G.nodes[dike]['prec_node'], 'C2']
        G.nodes[dike]['C3'] = Muskingum_params.loc[
            G.nodes[dike]['prec_node'], 'C3']

    # The plausible 133 upstream wave-shapes:
    G.nodes['A.0']['Qevents_shape'] = pd.read_excel(
        './data/hydrology/wave_shapes.xls')

    return G, dike_list, dike_branches, steps