def to_node(self):
    """
    Build a full discrete fragility node from CSV

    :returns: a `fragilityModel` Node with one child per fragility
              function set, each carrying its IML text and `ffd` children
    """
    tset = self.tableset
    frag = tset.tableFragilityDiscrete[0].to_node()
    ffs_node = record.nodedict(tset.tableFFSetDiscrete)
    # NOTE(review): below ffs_node is indexed with ordinal tuples like
    # (ordinal,); this membership test presumably relies on nodedict also
    # exposing a 'noDamageLimit' string key -- verify against record.nodedict
    nodamage = float(ffs_node['noDamageLimit']) \
        if 'noDamageLimit' in ffs_node else None
    frag.nodes.extend(ffs_node.values())
    for (ls, ordinal), data in groupby(
            tset.tableFFDataDiscrete, ['limitState', 'ffs_ordinal']):
        data = list(data)
        # check that we can instantiate a FragilityFunction in risklib
        # NOTE(review): `if nodamage:` treats a 0.0 noDamageLimit the same
        # as a missing one -- confirm that is intended
        if nodamage:
            # prepend the no-damage point (iml=nodamage, poe=0.0)
            scientific.FragilityFunctionDiscrete(
                ls, [nodamage] + [rec.iml for rec in data],
                [0.0] + [rec.poe for rec in data], nodamage)
        else:
            scientific.FragilityFunctionDiscrete(
                ls, [rec.iml for rec in data],
                [rec.poe for rec in data], nodamage)
        # rec['iml'] is presumably the raw CSV field (string), whereas
        # rec.iml above is the converted value -- TODO confirm
        imls = ' '.join(rec['iml'] for rec in data)
        ffs_node[(ordinal,)].IML.text = imls
        poes = ' '.join(rec['poe'] for rec in data)
        n = Node('ffd', dict(ls=ls))
        n.append(Node('poEs', text=poes))
        ffs_node[(ordinal,)].append(n)
    return frag
def to_node(self):
    """Serialize this record as a `discreteVulnerabilitySet` node."""
    attrs = {
        'vulnerabilitySetID': self['vulnerabilitySetID'],
        'assetCategory': self['assetCategory'],
        'lossCategory': self['lossCategory'],
    }
    vset = Node('discreteVulnerabilitySet', attrs)
    vset.append(Node('IML', {'IMT': self['IMT']}))
    return vset
def to_node(self):
    """Serialize this record as a `discreteVulnerability` node."""
    attrs = {
        'vulnerabilityFunctionID': self['vulnerabilityFunctionID'],
        'probabilisticDistribution': self['probabilisticDistribution'],
    }
    vuln = Node('discreteVulnerability', attrs)
    # the two subnodes are empty placeholders, filled in later
    for tag in ('lossRatio', 'coefficientsVariation'):
        vuln.append(Node(tag))
    return vuln
def to_node(self):
    """Serialize this record as a discrete `ffs` node."""
    ffs = Node('ffs')
    no_damage = self['noDamageLimit']
    if no_damage:  # omit the attribute when empty/missing
        ffs['noDamageLimit'] = no_damage
    ffs.append(Node('taxonomy', text=self['taxonomy']))
    iml_attrs = {'IMT': self['IMT'], 'imlUnit': self['imlUnit']}
    ffs.append(Node('IML', iml_attrs))
    return ffs
def dmg_dist_total_node(self, data):
    """
    :param data: a sequence of records with attributes .dmg_state,
                 .mean and .stddev
    :returns: a `totalDmgDist` node
    """
    root = Node("totalDmgDist", nodes=[self.dmg_states])
    # emit the damage states in limit-state order (lsi)
    for rec in sorted(data, key=lambda r: r.dmg_state.lsi):
        root.append(Node("damage",
                         dict(ds=rec.dmg_state.dmg_state,
                              mean=rec.mean, stddev=rec.stddev)))
    return root
def build_slip_list_node(slip_list):
    """
    :param slip_list: an array of shape (N, 2) with columns (slip, weight)
    :returns: a slipList node containing N slip nodes
    """
    node = Node('slipList', {})
    for slip, weight in slip_list:
        # the slip value becomes the node text, the weight an attribute
        node.append(Node('slip', dict(weight=weight), slip))
    return node
def to_node(self):
    """Serialize this record as a continuous `ffs` node."""
    ffs = Node('ffs')
    ffs.append(Node('taxonomy', text=self['taxonomy']))
    # optional attributes are omitted when empty/missing
    for attr in ('noDamageLimit', 'type'):
        value = self[attr]
        if value:
            ffs[attr] = value
    ffs.append(Node('IML', dict(IMT=self['IMT'],
                                imlUnit=self['imlUnit'],
                                minIML=self['minIML'],
                                maxIML=self['maxIML'])))
    return ffs
def cm_node(self, loc, asset_refs, means, stddevs):
    """
    :param loc: a location object with attributes x and y
    :param asset_refs: asset reference strings
    :param means: array of means, one per asset
    :param stddevs: array of stddevs, one per asset
    :returns: a `CMNode` node
    """
    node = Node('CMNode', nodes=[self.point_node(loc)])
    # one `cf` child per asset, walking the three sequences in lockstep
    for ref, avg, sigma in zip(asset_refs, means, stddevs):
        node.append(Node('cf', dict(assetRef=ref, mean=avg, stdDev=sigma)))
    return node
def dmg_dist_per_taxonomy_node(self, data):
    """
    :param data: a sequence of records with attributes .taxonomy,
                 .mean and .stddev
    :returns: a `dmgDistPerTaxonomy` node
    """
    root = Node('dmgDistPerTaxonomy', nodes=[self.dmg_states])
    grouped = groupby(data, operator.attrgetter('taxonomy'))
    for taxonomy in grouped:
        rows = grouped[taxonomy]
        root.append(self.dd_node_taxo(
            taxonomy,
            [r.mean for r in rows],
            [r.stddev for r in rows]))
    return root
def dmg_dist_total_node(self, data):
    """
    :param data: a sequence of records with attributes .dmg_state,
                 .mean and .stddev
    :returns: a `totalDmgDist` node
    """
    ordered = sorted(data, key=lambda r: r.dmg_state.lsi)
    node = Node('totalDmgDist', nodes=[self.dmg_states])
    for row in ordered:
        attrs = dict(ds=row.dmg_state.dmg_state,
                     mean=row.mean, stddev=row.stddev)
        node.append(Node('damage', attrs))
    return node
def build_hypo_list_node(hypo_list):
    """
    :param hypo_list: an array of shape (N, 3) with columns
                      (alongStrike, downDip, weight)
    :returns: a hypoList node containing N hypo nodes
    """
    node = Node('hypoList', {})
    for strike, dip, weight in hypo_list:
        node.append(Node('hypo', dict(
            alongStrike=strike, downDip=dip, weight=weight)))
    return node
def dmg_dist_per_asset_node(self, data):
    """
    :param data: a sequence of records with attributes .exposure_data,
                 .mean and .stddev
    :returns: a `dmgDistPerAsset` node
    """
    root = Node('dmgDistPerAsset', nodes=[self.dmg_states])
    by_site = groupby(data, lambda r: r.exposure_data.site)
    for site in by_site:
        ddnode = Node('DDNode', nodes=[self.point_node(site)])
        # map each asset_ref to its list of (mean, stddev) pairs
        by_asset = groupby(
            by_site[site],
            lambda r: r.exposure_data.asset_ref,
            lambda rows: [(r.mean, r.stddev) for r in rows])
        for ref, pairs in by_asset.items():
            means, stddevs = zip(*pairs)
            ddnode.append(self.asset_node(ref, means, stddevs))
        root.append(ddnode)
    return root
def collapse_map_node(self, data):
    """
    :param data: a sequence of records with attributes .exposure_data,
        .mean and .stddev
    :returns: a `collapseMap` node
    """
    node = Node("collapseMap")
    # group the records by site location
    data_by_location = groupby(data, lambda r: r.exposure_data.site)
    for loc in data_by_location:
        asset_refs = []
        means = []
        stddevs = []
        # deterministic output: sort the rows by asset reference
        for row in sorted(data_by_location[loc],
                          key=lambda r: r.exposure_data.asset_ref):
            asset_refs.append(row.exposure_data.asset_ref)
            means.append(row.mean)
            stddevs.append(row.stddev)
        node.append(self.cm_node(loc, asset_refs, means, stddevs))
    return node
def collapse_map_node(self, data):
    """
    :param data: a sequence of records with attributes .exposure_data,
        .mean and .stddev
    :returns: a `collapseMap` node
    """
    node = Node('collapseMap')
    # one CMNode per distinct site
    data_by_location = groupby(data, lambda r: r.exposure_data.site)
    for loc in data_by_location:
        asset_refs = []
        means = []
        stddevs = []
        # sort by asset reference so the output is deterministic
        for row in sorted(data_by_location[loc],
                          key=lambda r: r.exposure_data.asset_ref):
            asset_refs.append(row.exposure_data.asset_ref)
            means.append(row.mean)
            stddevs.append(row.stddev)
        node.append(self.cm_node(loc, asset_refs, means, stddevs))
    return node
def write(self, destination, source_model, name=None):
    """
    Exports to NRML
    """
    # start from a clean file
    if os.path.exists(destination):
        os.remove(destination)
    self.destination = destination
    if name:
        source_model.name = name
    out = Node("sourceModel", {"name": name})
    # one sourceGroup per tectonic region type
    by_trt = groupby(source_model.sources,
                     operator.itemgetter('tectonicRegion'))
    for idx, (trt, sources) in enumerate(by_trt.items(), 1):
        group = Node('sourceGroup',
                     {'tectonicRegion': trt, 'name': 'group %d' % idx},
                     nodes=sources)
        out.append(group)
    print("Exporting Source Model to %s" % self.destination)
    with open(self.destination, "wb") as f:
        nrml.write([out], f, "%s")
def to_node(self):
    """
    Build a full continuous fragility node from CSV
    """
    tset = self.tableset
    frag = tset.tableFragilityContinuous[0].to_node()
    ffs_node = record.nodedict(tset.tableFFSetContinuous)
    frag.nodes.extend(ffs_node.values())
    for (ls, ordinal), rows in groupby(
            tset.tableFFDataContinuous, ['limitState', 'ffs_ordinal']):
        rows = list(rows)
        ffc = Node('ffc', dict(ls=ls))
        # each row carries a (param, value) pair in its trailing fields
        params = dict(row[2:] for row in rows)
        # check that we can instantiate a FragilityFunction in risklib
        scientific.FragilityFunctionContinuous(
            ls, float(params['mean']), float(params['stddev']))
        ffc.append(Node('params', params))
        ffs_node[(ordinal,)].append(ffc)
    return frag
def convert_fragility_model_04(node, fname, fmcounter=itertools.count(1)):
    """
    Convert a fragility model node from NRML 0.4 to NRML 0.5.

    :param node: an :class:`openquake.commonib.node.Node` in NRML 0.4
    :param fname: path of the fragility file
    :returns: an :class:`openquake.commonib.node.Node` in NRML 0.5
    """
    # NB: the mutable default is deliberate -- the shared itertools.count
    # yields a unique model id across repeated calls in the same process
    convert_type = {"lognormal": "logncdf"}
    new = Node('fragilityModel',
               dict(assetCategory='building',
                    lossCategory='structural',
                    id='fm_%d_converted_from_NRML_04' % next(fmcounter)))
    # `context` wraps parsing errors with the file name and node location;
    # `~n` extracts the text content of a node
    with context(fname, node):
        fmt = node['format']
        descr = ~node.description
        limit_states = ~node.limitStates
    new.append(Node('description', {}, descr))
    new.append((Node('limitStates', {}, ' '.join(limit_states))))
    # node[2:] skips the description and limitStates children
    for ffs in node[2:]:
        IML = ffs.IML
        # NB: noDamageLimit = None is different than zero
        nodamage = ffs.attrib.get('noDamageLimit')
        ff = Node('fragilityFunction', {'format': fmt})
        ff['id'] = ~ffs.taxonomy
        # map the 0.4 distribution type to the 0.5 shape name
        ff['shape'] = convert_type[ffs.attrib.get('type', 'lognormal')]
        if fmt == 'continuous':
            with context(fname, IML):
                attr = dict(imt=IML['IMT'],
                            minIML=IML['minIML'],
                            maxIML=IML['maxIML'])
                if nodamage is not None:
                    attr['noDamageLimit'] = nodamage
            ff.append(Node('imls', attr))
            # ffs[2:] skips taxonomy and IML; remaining children are `ffc`
            for ffc in ffs[2:]:
                with context(fname, ffc):
                    ls = ffc['ls']
                    param = ffc.params
                    with context(fname, param):
                        m, s = param['mean'], param['stddev']
                ff.append(Node('params', dict(ls=ls, mean=m, stddev=s)))
        else:  # discrete
            with context(fname, IML):
                # (~IML)[1] is presumably the list of IML values in the
                # node text -- TODO confirm against the 0.4 parser
                imls = ' '.join(map(str, (~IML)[1]))
                attr = dict(imt=IML['IMT'])
                if nodamage is not None:
                    attr['noDamageLimit'] = nodamage
            ff.append(Node('imls', attr, imls))
            # remaining children are `ffd` nodes, one per limit state
            for ffd in ffs[2:]:
                ls = ffd['ls']
                with context(fname, ffd):
                    poes = ' '.join(map(str, ~ffd.poEs))
                ff.append(Node('poes', dict(ls=ls), poes))
        new.append(ff)
    return new
def to_node(self):
    """Serialize this record as a `fragilityModel` node."""
    fmodel = Node('fragilityModel', dict(format=self['format']))
    for field in ('description', 'limitStates'):
        fmodel.append(Node(field, text=self[field]))
    return fmodel
def to_node(self):
    """Serialize this record as an `exposureModel` node."""
    exposure = Node('exposureModel', dict(
        id=self['id'],
        category=self['category'],
        taxonomySource=self['taxonomySource']))
    exposure.append(Node('description', text=self['description']))
    if exposure['category'] == 'buildings':
        # buildings also carry the cost/area conversion subtree
        conv = Node('conversions')
        subnodes = [
            Node('area', dict(type=self['area_type'],
                              unit=self['area_unit'])),
            Node('costTypes'),
            Node('deductible',
                 dict(isAbsolute=self['deductible_is_absolute'])),
            Node('insuranceLimit',
                 dict(isAbsolute=self['insurance_limit_is_absolute'])),
        ]
        conv.nodes.extend(subnodes)
        exposure.append(conv)
    exposure.append(Node('assets'))
    return exposure