def to_node(self):
    """
    Build a vulnerabilityModel node from CSV
    """
    tset = self.tableset
    dvs_node = record.nodedict(tset.tableDiscreteVulnerabilitySet)
    dvf_node = record.nodedict(tset.tableDiscreteVulnerability)
    for (set_id, vf_id), group in groupby(
            tset.tableDiscreteVulnerabilityData,
            ['vulnerabilitySetID', 'vulnerabilityFunctionID']):
        dvf = dvf_node[set_id, vf_id]
        imt = dvs_node[(set_id,)].IML['IMT']
        coeffs = []
        ratios = []
        imls = []
        for row in group:
            imls.append(row['IML'])
            coeffs.append(row['coefficientsVariation'])
            ratios.append(row['lossRatio'])
        # check that we can instantiate a VulnerabilityFunction in risklib
        scientific.VulnerabilityFunction(
            imt, map(float, imls), map(float, ratios), map(float, coeffs))
        dvf.lossRatio.text = ' '.join(ratios)
        dvf.coefficientsVariation.text = ' '.join(coeffs)
        dvs_node[(set_id,)].append(dvf)
        dvs_node[(set_id,)].IML.text = ' '.join(imls)
    return Node('vulnerabilityModel', nodes=dvs_node.values())
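# Illustrative sketch, not part of the converter: how rows grouped by
# (vulnerabilitySetID, vulnerabilityFunctionID) collapse into the
# space-separated IML/lossRatio/coefficientsVariation strings built above.
# It uses plain itertools.groupby on made-up rows; the real code goes
# through the nrmllib record/groupby helpers instead.
import itertools
import operator

sample_rows = [  # hypothetical DiscreteVulnerabilityData rows
    {'vulnerabilitySetID': 'PAGER', 'vulnerabilityFunctionID': 'IR',
     'IML': '5.0', 'lossRatio': '0.01', 'coefficientsVariation': '0.3'},
    {'vulnerabilitySetID': 'PAGER', 'vulnerabilityFunctionID': 'IR',
     'IML': '5.5', 'lossRatio': '0.02', 'coefficientsVariation': '0.3'},
]
keyfunc = operator.itemgetter('vulnerabilitySetID', 'vulnerabilityFunctionID')
for (set_id, vf_id), rows in itertools.groupby(sample_rows, keyfunc):
    rows = list(rows)
    print(set_id, vf_id,
          ' '.join(r['IML'] for r in rows),
          ' '.join(r['lossRatio'] for r in rows),
          ' '.join(r['coefficientsVariation'] for r in rows))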
def _to_node(self): """ Add to a gmfset node all the data from a file GmfData.csv of the form:: stochasticEventSetId,imtStr,ruptureId,lon,lat,gmv 1,SA(0.025),,0.0,0.0,0.2 1,SA(0.025),,1.0,0.0,1.4 1,SA(0.025),,0.0,1.0,0.6 1,PGA,,0.0,0.0,0.2 1,PGA,,1.0,0.0,1.4 1,PGA,,0.0,1.0,0.6 The rows are grouped by ses, imt, rupture. """ tset = self.tableset gmfset_node = record.nodedict(tset.tableGmfSet) for (ses, imt, rupture), rows in groupby( tset.tableGmfData, ['stochasticEventSetId', 'imtStr', 'ruptureId']): if imt.startswith('SA'): attr = dict(IMT='SA', saPeriod=imt[3:-1], saDamping='5') else: attr = dict(IMT=imt) if rupture: attr['ruptureId'] = rupture nodes = [records.GmfData(*r).to_node() for r in rows] gmfset_node[(ses,)].append(Node('gmf', attr, nodes=nodes)) return gmfset_node
def to_node(self): """ Build a full discrete fragility node from CSV """ tset = self.tableset frag = tset.tableFragilityDiscrete[0].to_node() ffs_node = record.nodedict(tset.tableFFSetDiscrete) nodamage = float(ffs_node['noDamageLimit']) \ if 'noDamageLimit' in ffs_node else None frag.nodes.extend(ffs_node.values()) for (ls, ordinal), data in groupby( tset.tableFFDataDiscrete, ['limitState', 'ffs_ordinal']): data = list(data) # check that we can instantiate a FragilityFunction in risklib if nodamage: scientific.FragilityFunctionDiscrete( ls, [nodamage] + [rec.iml for rec in data], [0.0] + [rec.poe for rec in data], nodamage) else: scientific.FragilityFunctionDiscrete( ls, [rec.iml for rec in data], [rec.poe for rec in data], nodamage) imls = ' '.join(rec['iml'] for rec in data) ffs_node[(ordinal,)].IML.text = imls poes = ' '.join(rec['poe'] for rec in data) n = Node('ffd', dict(ls=ls)) n.append(Node('poEs', text=poes)) ffs_node[(ordinal,)].append(n) return frag
def _to_node(self): """ Add to a gmfset node all the data from a file GmfData.csv of the form:: stochasticEventSetId,imtStr,ruptureId,lon,lat,gmv 1,SA(0.025),,0.0,0.0,0.2 1,SA(0.025),,1.0,0.0,1.4 1,SA(0.025),,0.0,1.0,0.6 1,PGA,,0.0,0.0,0.2 1,PGA,,1.0,0.0,1.4 1,PGA,,0.0,1.0,0.6 The rows are grouped by ses, imt, rupture. """ tset = self.tableset gmfset_node = record.nodedict(tset.tableGmfSet) for (ses, imt, rupture), rows in groupby( tset.tableGmfData, ['stochasticEventSetId', 'imtStr', 'ruptureId']): if imt.startswith('SA'): attr = dict(IMT='SA', saPeriod=imt[3:-1], saDamping='5') else: attr = dict(IMT=imt) if rupture: attr['ruptureId'] = rupture nodes = [records.GmfData(*r).to_node() for r in rows] gmfset_node[(ses, )].append(Node('gmf', attr, nodes=nodes)) return gmfset_node
def to_node(self): """ Build a full discrete fragility node from CSV """ tset = self.tableset frag = tset.tableFragilityDiscrete[0].to_node() ffs_node = record.nodedict(tset.tableFFSetDiscrete) nodamage = float(ffs_node['noDamageLimit']) \ if 'noDamageLimit' in ffs_node else None frag.nodes.extend(ffs_node.values()) for (ls, ordinal), data in groupby(tset.tableFFDataDiscrete, ['limitState', 'ffs_ordinal']): data = list(data) # check that we can instantiate a FragilityFunction in risklib if nodamage: scientific.FragilityFunctionDiscrete( [nodamage] + [rec.iml for rec in data], [0.0] + [rec.poe for rec in data], nodamage) else: scientific.FragilityFunctionDiscrete([rec.iml for rec in data], [rec.poe for rec in data], nodamage) imls = ' '.join(rec['iml'] for rec in data) ffs_node[(ordinal, )].IML.text = imls poes = ' '.join(rec['poe'] for rec in data) n = Node('ffd', dict(ls=ls)) n.append(Node('poEs', text=poes)) ffs_node[(ordinal, )].append(n) return frag
def _assetgenerator(self, assets, costtypes, periods):
    """
    Convert assets into asset nodes.

    :param assets: asset records
    :param costtypes: the valid cost types
    :param periods: the valid periods
    :returns: an iterable over Node objects describing exposure assets
    """
    # each asset contains the subnodes location, costs and occupancies
    loc_dict = record.nodedict(self.tableset.tableLocation)
    cost_dict = record.nodedict(self.tableset.tableCost)
    occ_dict = record.nodedict(self.tableset.tableOccupancy)
    for asset in assets:
        ref = asset['asset_ref']
        nodes = []
        location = loc_dict[(asset['location_id'],)]
        nodes.append(location)
        costnodes = []
        for ctype in costtypes:
            cost = cost_dict.get((ref, ctype))
            if cost is not None:
                costnodes.append(cost)
        if costnodes:
            nodes.append(Node('costs', {}, nodes=costnodes))
        occupancynodes = []
        for period in periods:
            occupancy = occ_dict.get((ref, period))
            if occupancy is not None:
                occupancynodes.append(occupancy)
        if occupancynodes:
            nodes.append(Node('occupancies', {}, nodes=occupancynodes))
        assetnode = asset.to_node()
        assetnode.nodes = nodes
        yield assetnode
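# Illustrative sketch, not part of the converter: the per-asset lookup
# pattern above, with a plain dict standing in for record.nodedict and
# made-up keys. Missing (asset_ref, cost_type) pairs are simply skipped,
# so an asset only gets a costs child if at least one cost row exists.
cost_dict = {('a1', 'structural'): 'cost-node-1',
             ('a1', 'contents'): 'cost-node-2'}
costtypes = ['structural', 'nonstructural', 'contents']
ref = 'a1'
costnodes = []
for ctype in costtypes:
    cost = cost_dict.get((ref, ctype))
    if cost is not None:
        costnodes.append(cost)
print(costnodes)  # ['cost-node-1', 'cost-node-2']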
def to_node(self): """ Build a full continuous fragility node from CSV """ tset = self.tableset frag = tset.tableFragilityContinuous[0].to_node() ffs_node = record.nodedict(tset.tableFFSetContinuous) frag.nodes.extend(ffs_node.values()) for (ls, ordinal), data in groupby( tset.tableFFDataContinuous, ['limitState', 'ffs_ordinal']): data = list(data) n = Node('ffc', dict(ls=ls)) param = dict(row[2:] for row in data) # param, value # check that we can instantiate a FragilityFunction in risklib scientific.FragilityFunctionContinuous( ls, float(param['mean']), float(param['stddev'])) n.append(Node('params', param)) ffs_node[(ordinal,)].append(n) return frag
def to_node(self): """ Build a full continuous fragility node from CSV """ tset = self.tableset frag = tset.tableFragilityContinuous[0].to_node() ffs_node = record.nodedict(tset.tableFFSetContinuous) frag.nodes.extend(ffs_node.values()) for (ls, ordinal), data in groupby(tset.tableFFDataContinuous, ['limitState', 'ffs_ordinal']): data = list(data) n = Node('ffc', dict(ls=ls)) param = dict(row[2:] for row in data) # param, value # check that we can instantiate a FragilityFunction in risklib scientific.FragilityFunctionContinuous(float(param['mean']), float(param['stddev'])) n.append(Node('params', param)) ffs_node[(ordinal, )].append(n) return frag