def test_continuous(self):
    """
    Classical damage with four continuous fragility functions and a
    risk investigation time twice the hazard investigation time.
    """
    # hazard curve support: 28 IMLs from 0.05 to 1.4
    hazard_imls = numpy.array([
        0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5,
        0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95, 1,
        1.05, 1.1, 1.15, 1.2, 1.25, 1.3, 1.35, 1.4])
    hazard_poes = numpy.array([
        0.5917765421, 0.2482053921, 0.1298604374, 0.07718928965,
        0.04912904516, 0.03262871528, 0.02226628376, 0.01553639696,
        0.01101802934, 0.007905366815, 0.005741833876, 0.004199803178,
        0.003088785556, 0.00229291494, 0.001716474683, 0.001284555773,
        0.0009583846496, 0.0007102377096, 0.0005201223961,
        0.0003899464723, 0.0002997724582, 0.0002287788496,
        0.0001726083994, 0.0001279544769, 0.00009229282594,
        0.00006368651249, 0.00004249201524, 0.00003033694903])
    # four limit states, each defined by (name, mean, stddev)
    limit_state_params = [('slight', 0.160, 0.104),
                          ('moderate', 0.225, 0.158),
                          ('extreme', 0.400, 0.300),
                          ('complete', 0.600, 0.480)]
    fragility_functions = scientific.FragilityFunctionList(
        [scientific.FragilityFunctionContinuous(*params)
         for params in limit_state_params],
        imls=hazard_imls, steps_per_interval=None)
    investigation_time = 50.
    risk_investigation_time = 100.
    poos = scientific.classical_damage(
        fragility_functions, hazard_imls, hazard_poes,
        investigation_time, risk_investigation_time)
    # expected probabilities of occurrence per damage state
    aaae(poos, [0.56652127, 0.12513401, 0.1709355, 0.06555033, 0.07185889])
def test_continuous_ff(self):
    """
    Scenario damage with continuous fragility functions for two
    taxonomies (RM, RC), checking per-asset and aggregated statistics.
    """
    # two taxonomies, each with two continuous limit states (mean, stddev)
    fragility_model = {
        'RC': [scientific.FragilityFunctionContinuous('LS1', 0.2, 0.05),
               scientific.FragilityFunctionContinuous('LS2', 0.35, 0.10)],
        'RM': [scientific.FragilityFunctionContinuous('LS1', 0.25, 0.08),
               scientific.FragilityFunctionContinuous('LS2', 0.40, 0.12)]}
    calc_rm = workflows.Damage(
        'PGA', 'RM', dict(damage=fragility_model['RM']))
    calc_rc = workflows.Damage(
        'PGA', 'RC', dict(damage=fragility_model['RC']))

    # asset a1: RM taxonomy, 3000 units
    [damage_a1] = calc_rm('damage', ['a1'], [self.hazard['a1']]).damages
    self.assert_ok(
        damage_a1 * 3000,
        [1562.6067550208, 1108.0189275488, 329.3743174305],
        [968.93502576, 652.7358505746, 347.3929450270])

    # asset a3: RM taxonomy, 1000 units
    [damage_a3] = calc_rm('damage', ['a3'], [self.hazard['a3']]).damages
    self.assert_ok(
        damage_a3 * 1000,
        [417.3296948271, 387.2084383654, 195.4618668074],
        [304.4769498434, 181.1415598664, 253.91309010185])
    rm = damage_a1 * 3000 + damage_a3 * 1000

    # asset a2: RC taxonomy, 2000 units
    [damage_a2] = calc_rc('damage', ['a2'], [self.hazard['a2']]).damages
    self.assert_ok(
        damage_a2 * 2000,
        [56.7201291212, 673.1047565606, 1270.1751143182],
        [117.7802813522, 485.2023172324, 575.8724057319])
    rc = damage_a2 * 2000

    # aggregated statistics per taxonomy
    assert_close(
        rm.mean(0), [1979.9364498479, 1495.2273659142, 524.8361842379])
    assert_close(
        rm.std(0, ddof=1),
        [1103.6005152909, 745.3252495731, 401.9195159565])
    assert_close(
        rc.mean(0), [56.7201291212, 673.1047565606, 1270.1751143182])
    assert_close(
        rc.std(0, ddof=1),
        [117.7802813522, 485.2023172324, 575.8724057319])
def _parse_fragility(content):
    """
    Parse the fragility XML file and return fragility_model,
    fragility_functions, and damage_states for usage in get_risk_models.

    :param content: the content of the fragility XML file
    :returns: a pair (damage_states, risk_models) where damage_states is
        a list of damage state names and risk_models a dictionary
        taxonomy -> dict with a 'damage' RiskModel
    """
    iterparse = iter(parsers.FragilityModelParser(content))
    # the first item yielded by the parser is (format, limit_states);
    # use the builtin next() instead of the Python 2-only .next() method
    fmt, limit_states = next(iterparse)
    damage_states = ['no_damage'] + limit_states
    fragility_functions = collections.defaultdict(dict)
    tax_imt = dict()
    for taxonomy, iml, params, no_damage_limit in iterparse:
        tax_imt[taxonomy] = iml['IMT']
        if fmt == "discrete":
            if no_damage_limit is None:
                fragility_functions[taxonomy] = [
                    scientific.FragilityFunctionDiscrete(
                        iml['imls'], poes, iml['imls'][0])
                    for poes in params]
            else:
                # prepend the noDamageLimit with zero probability so the
                # function starts at zero at that point
                fragility_functions[taxonomy] = [
                    scientific.FragilityFunctionDiscrete(
                        [no_damage_limit] + iml['imls'], [0.0] + poes,
                        no_damage_limit)
                    for poes in params]
        else:
            # continuous format: params is a sequence of (mean, stddev)
            fragility_functions[taxonomy] = [
                scientific.FragilityFunctionContinuous(*mean_stddev)
                for mean_stddev in params]
    risk_models = dict(
        (tax, dict(damage=RiskModel(tax_imt[tax], None, ffs)))
        for tax, ffs in fragility_functions.items())
    return damage_states, risk_models
def to_node(self):
    """
    Build a full continuous fragility node from CSV
    """
    tset = self.tableset
    frag = tset.tableFragilityContinuous[0].to_node()
    ffs_node = record.nodedict(tset.tableFFSetContinuous)
    frag.nodes.extend(ffs_node.values())
    for (ls, ordinal), data in groupby(
            tset.tableFFDataContinuous, ['limitState', 'ffs_ordinal']):
        rows = list(data)
        ffc = Node('ffc', dict(ls=ls))
        param = dict(row[2:] for row in rows)  # param, value
        # check that we can instantiate a FragilityFunction in risklib
        # NOTE(review): other call sites in this codebase pass the limit
        # state name as first argument to FragilityFunctionContinuous —
        # confirm this two-argument signature is the intended one here
        scientific.FragilityFunctionContinuous(float(param['mean']),
                                               float(param['stddev']))
        ffc.append(Node('params', param))
        ffs_node[(ordinal, )].append(ffc)
    return frag
def get_damage_states_and_risk_models(content):
    """
    Parse the fragility XML file and return a list of damage_states and a
    dictionary {taxonomy: risk model}

    :param content: the content of the fragility XML file
    :returns: a pair (damage_states, risk_models)
    """
    iterparse = iter(parsers.FragilityModelParser(content))
    # the first item yielded by the parser is (format, limit_states);
    # use the builtin next() instead of the Python 2-only .next() method
    fmt, limit_states = next(iterparse)
    damage_states = ['no_damage'] + limit_states
    fragility_functions = collections.defaultdict(dict)
    tax_imt = dict()
    for taxonomy, iml, params, no_damage_limit in iterparse:
        tax_imt[taxonomy] = iml['IMT']
        if fmt == "discrete":
            if no_damage_limit is None:
                fragility_functions[taxonomy] = [
                    scientific.FragilityFunctionDiscrete(
                        iml['imls'], poes, iml['imls'][0])
                    for poes in params]
            else:
                # prepend the noDamageLimit with zero probability so the
                # function starts at zero at that point
                fragility_functions[taxonomy] = [
                    scientific.FragilityFunctionDiscrete(
                        [no_damage_limit] + iml['imls'], [0.0] + poes,
                        no_damage_limit)
                    for poes in params]
        else:
            # continuous format: params is a sequence of (mean, stddev)
            fragility_functions[taxonomy] = [
                scientific.FragilityFunctionContinuous(*mean_stddev)
                for mean_stddev in params]
    risk_models = {}
    for taxonomy, ffs in fragility_functions.items():
        dic = dict(damage=List(ffs, imt=tax_imt[taxonomy]))
        risk_models[taxonomy] = workflows.RiskModel(
            taxonomy, workflows.Damage(dic))
    return damage_states, risk_models
def test_call(self):
    # evaluate a continuous FF (mean=0.5, stddev=1) at IML 0.1
    ff = scientific.FragilityFunctionContinuous('LS1', 0.5, 1)
    self._close_to(0.26293, ff(0.1))
def test_continuous_pickle(self):
    # a continuous fragility function must survive a pickle round trip
    original = scientific.FragilityFunctionContinuous('LS1', 0, 1)
    pickle.loads(pickle.dumps(original))
def test_call(self):
    # continuous FF with mean=0.5, stddev=1 and IML bounds [0, 2.],
    # evaluated on a one-element numpy array
    ff = scientific.FragilityFunctionContinuous('LS1', 0.5, 1, 0, 2.)
    self._close_to(0.26293, ff(numpy.array([0.1])))
def get_fragility_functions(fname, continuous_fragility_discretization, steps_per_interval=None): """ :param fname: path of the fragility file :param continuous_fragility_discretization: continuous_fragility_discretization parameter :param steps_per_interval: steps_per_interval parameter :returns: damage_states list and dictionary taxonomy -> functions """ [fmodel] = read_nodes(fname, lambda el: el.tag.endswith('fragilityModel'), nodefactory['fragilityModel']) # ~fmodel.description is ignored limit_states = ~fmodel.limitStates tag = 'ffc' if fmodel['format'] == 'continuous' else 'ffd' fragility_functions = AccumDict() # taxonomy -> functions for ffs in fmodel.getnodes('ffs'): add_zero_value = False # NB: the noDamageLimit is only defined for discrete fragility # functions. It is a way to set the starting point of the functions: # if noDamageLimit is at the left of each IMLs, it means that the # function starts at zero at the given point, so we need to add # noDamageLimit to the list of IMLs and zero to the list of poes nodamage = ffs.attrib.get('noDamageLimit') taxonomy = ~ffs.taxonomy imt_str, imls, min_iml, max_iml, imlUnit = ~ffs.IML if fmodel['format'] == 'discrete': if nodamage is not None and nodamage < imls[0]: # discrete fragility imls = [nodamage] + imls add_zero_value = True if steps_per_interval: gen_imls = scientific.fine_graining(imls, steps_per_interval) else: gen_imls = imls else: # continuous: if min_iml is None: raise InvalidFile('Missing attribute minIML, line %d' % ffs.IML.lineno) elif max_iml is None: raise InvalidFile('Missing attribute maxIML, line %d' % ffs.IML.lineno) gen_imls = numpy.linspace(min_iml, max_iml, continuous_fragility_discretization) fragility_functions[taxonomy] = scientific.FragilityFunctionList( [], imt=imt_str, imls=list(gen_imls), no_damage_limit=nodamage, continuous_fragility_discretization= continuous_fragility_discretization, steps_per_interval=steps_per_interval) lstates = [] for ff in ffs.getnodes(tag): ls = ff['ls'] 
# limit state lstates.append(ls) if tag == 'ffc': with context(fname, ff): mean_stddev = ~ff.params fragility_functions[taxonomy].append( scientific.FragilityFunctionContinuous(ls, *mean_stddev)) else: # discrete with context(fname, ff): poes = ~ff.poEs if add_zero_value: poes = [0.] + poes fragility_functions[taxonomy].append( scientific.FragilityFunctionDiscrete( ls, imls, poes, nodamage)) if lstates != limit_states: raise InvalidFile("Expected limit states %s, got %s in %s" % (limit_states, lstates, fname)) fragility_functions.damage_states = ['no_damage'] + limit_states return fragility_functions