def mfds2multimfd(mfds):
    """
    Convert a list of MFD nodes into a single MultiMFD node
    """
    _, kind = mfds[0].tag.split('}')
    node = Node('multiMFD', dict(kind=kind, size=len(mfds)))
    lengths = None
    for field in mfd.multi_mfd.ASSOC[kind][1:]:
        alias = mfd.multi_mfd.ALIAS.get(field, field)
        if field in ('magnitudes', 'occurRates'):
            data = [~getattr(m, field) for m in mfds]
            lengths = [len(d) for d in data]
            data = sum(data, [])  # flatten the list of lists
        else:
            try:
                data = [m[alias] for m in mfds]
            except KeyError:
                if alias == 'binWidth':
                    # missing binWidth in GR MFDs is ok
                    continue
                else:
                    raise
        node.append(Node(field, text=collapse(data)))
    if lengths:  # this is the last field if present
        node.append(Node('lengths', text=collapse(lengths)))
    return node

def mfds2multimfd(mfds):
    """
    Convert a list of MFD nodes into a single MultiMFD node
    """
    _, kind = mfds[0].tag.split('}')
    node = Node('multiMFD', dict(kind=kind))
    lengths = None
    for field in mfd.multi_mfd.ASSOC[kind][1:]:
        alias = mfd.multi_mfd.ALIAS.get(field, field)
        if field in ('magnitudes', 'occurRates'):
            data = [~getattr(m, field) for m in mfds]
            lengths = [len(d) for d in data]
            data = sum(data, [])  # the list has to be flat
        else:
            try:
                data = [m[alias] for m in mfds]
            except KeyError:
                if alias == 'binWidth':
                    # missing binWidth in GR MFDs is ok
                    continue
                else:
                    raise
        node.append(Node(field, text=data))
    if lengths:  # this is the last field if present
        node.append(Node('lengths', text=lengths))
    return node

def kite_surface_node(profiles):
    """
    :param profiles: a list of lists of points
    :returns: a Node of kind kiteSurface
    """
    node = Node('kiteSurface')
    for profile in profiles:
        node.append(profile_node(profile))
    return node

def surface_nodes(self):
    """
    A single element list containing a planarSurface node
    """
    node = Node('planarSurface')
    for name, lon, lat, depth in zip(
            'topLeft topRight bottomLeft bottomRight'.split(),
            self.corner_lons, self.corner_lats, self.corner_depths):
        node.append(Node(name, dict(lon=lon, lat=lat, depth=depth)))
    return [node]

def build_slip_list_node(slip_list):
    """
    :param slip_list: an array of shape (N, 2) with columns (slip, weight)
    :returns: a slipList node containing N slip nodes
    """
    sliplist = Node('slipList', {})
    for row in slip_list:
        sliplist.append(Node('slip', dict(weight=row[1]), row[0]))
    return sliplist

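# A minimal usage sketch for build_slip_list_node, assuming a plain numpy
# array as input; the (slip, weight) values below are made up for
# illustration only.
import numpy

slip_list = numpy.array([[90.0, 0.7],   # columns: slip, weight
                         [0.0, 0.3]])
sliplist_node = build_slip_list_node(slip_list)
assert len(sliplist_node) == 2  # one slip subnode per row
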
def _read_csv(self, csvnames, dirname):
    """
    :param csvnames: names of csv files, space separated
    :param dirname: the directory where the csv files are
    :yields: asset nodes
    """
    expected_header = self._csv_header()
    fnames = [os.path.join(dirname, f) for f in csvnames.split()]
    for fname in fnames:
        with open(fname, encoding='utf-8') as f:
            fields = next(csv.reader(f))
            header = set(fields)
            if len(header) < len(fields):
                raise InvalidFile(
                    '%s: The header %s contains a duplicated field' %
                    (fname, header))
            elif expected_header - header:
                raise InvalidFile(
                    'Unexpected header in %s\nExpected: %s\nGot: %s' %
                    (fname, sorted(expected_header), sorted(header)))
    occupancy_periods = self.occupancy_periods.split()
    for fname in fnames:
        with open(fname, encoding='utf-8') as f:
            for i, dic in enumerate(csv.DictReader(f), 1):
                asset = Node('asset', lineno=i)
                with context(fname, asset):
                    asset['id'] = dic['id']
                    asset['number'] = valid.positivefloat(dic['number'])
                    asset['taxonomy'] = dic['taxonomy']
                    if 'area' in dic:  # optional attribute
                        asset['area'] = dic['area']
                    loc = Node('location',
                               dict(lon=valid.longitude(dic['lon']),
                                    lat=valid.latitude(dic['lat'])))
                    costs = Node('costs')
                    for cost in self.cost_types['name']:
                        a = dict(type=cost, value=dic[cost])
                        costs.append(Node('cost', a))
                    occupancies = Node('occupancies')
                    for period in occupancy_periods:
                        a = dict(occupants=float(dic[period]),
                                 period=period)
                        occupancies.append(Node('occupancy', a))
                    tags = Node('tags')
                    for tagname in self.tagcol.tagnames:
                        if tagname != 'taxonomy':
                            tags.attrib[tagname] = dic[tagname]
                    asset.nodes.extend([loc, costs, occupancies, tags])
                    if i % 100000 == 0:
                        logging.info('Read %d assets', i)
                yield asset

def dmg_dist_per_taxonomy_node(self, data):
    """
    :param data: a sequence of records with attributes .taxonomy,
                 .mean and .stddev
    :returns: a `dmgDistPerTaxonomy` node
    """
    node = Node('dmgDistPerTaxonomy', nodes=[self.dmg_states])
    data_by_taxo = groupby(data, operator.attrgetter('taxonomy'))
    for taxonomy in data_by_taxo:
        means = [row.mean for row in data_by_taxo[taxonomy]]
        stddevs = [row.stddev for row in data_by_taxo[taxonomy]]
        node.append(self.dd_node_taxo(taxonomy, means, stddevs))
    return node

def cm_node(self, loc, asset_refs, means, stddevs):
    """
    :param loc: a location object with attributes x and y
    :param asset_refs: asset reference strings
    :param means: array of means, one per asset
    :param stddevs: array of stddevs, one per asset
    :returns: a `CMNode` node
    """
    cm = Node('CMNode', nodes=[self.point_node(loc)])
    for asset_ref, mean, stddev in zip(asset_refs, means, stddevs):
        cf = Node('cf', dict(assetRef=asset_ref, mean=mean, stdDev=stddev))
        cm.append(cf)
    return cm

def build_hypo_list_node(hypo_list):
    """
    :param hypo_list: an array of shape (N, 3) with columns
                      (alongStrike, downDip, weight)
    :returns: a hypoList node containing N hypo nodes
    """
    hypolist = Node('hypoList', {})
    for row in hypo_list:
        n = Node('hypo',
                 dict(alongStrike=row[0], downDip=row[1], weight=row[2]))
        hypolist.append(n)
    return hypolist

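# Hedged usage sketch for build_hypo_list_node; the positions and weights
# below are illustrative only (in real input the weights must sum to 1).
import numpy

hypo_list = numpy.array([[0.25, 0.25, 0.4],   # alongStrike, downDip, weight
                         [0.75, 0.75, 0.6]])
hypolist_node = build_hypo_list_node(hypo_list)
assert len(hypolist_node) == 2  # one hypo subnode per row
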
def _read_csv(self):
    """
    :yields: asset nodes
    """
    expected_header = self._csv_header()
    for fname in self.datafiles:
        with open(fname, encoding='utf-8') as f:
            fields = next(csv.reader(f))
            header = set(fields)
            if len(header) < len(fields):
                raise InvalidFile(
                    '%s: The header %s contains a duplicated field' %
                    (fname, header))
            elif expected_header - header - {'exposure', 'country'}:
                raise InvalidFile(
                    'Unexpected header in %s\nExpected: %s\nGot: %s' %
                    (fname, sorted(expected_header), sorted(header)))
    occupancy_periods = self.occupancy_periods.split()
    for fname in self.datafiles:
        with open(fname, encoding='utf-8') as f:
            for i, dic in enumerate(csv.DictReader(f), 1):
                asset = Node('asset', lineno=i)
                with context(fname, asset):
                    asset['id'] = dic['id']
                    asset['number'] = valid.positivefloat(dic['number'])
                    asset['taxonomy'] = dic['taxonomy']
                    if 'area' in dic:  # optional attribute
                        asset['area'] = dic['area']
                    loc = Node('location',
                               dict(lon=valid.longitude(dic['lon']),
                                    lat=valid.latitude(dic['lat'])))
                    costs = Node('costs')
                    for cost in self.cost_types['name']:
                        a = dict(type=cost, value=dic[cost])
                        if 'retrofitted' in dic:
                            a['retrofitted'] = dic['retrofitted']
                        costs.append(Node('cost', a))
                    occupancies = Node('occupancies')
                    for period in occupancy_periods:
                        a = dict(occupants=float(dic[period]),
                                 period=period)
                        occupancies.append(Node('occupancy', a))
                    tags = Node('tags')
                    for tagname in self.tagcol.tagnames:
                        if tagname not in ('taxonomy', 'exposure',
                                           'country'):
                            tags.attrib[tagname] = dic[tagname]
                    asset.nodes.extend([loc, costs, occupancies, tags])
                yield asset

def dmg_dist_total_node(self, data):
    """
    :param data: a sequence of records with attributes .dmg_state,
                 .mean and .stddev
    :returns: a `totalDmgDist` node
    """
    total = Node('totalDmgDist', nodes=[self.dmg_states])
    for row in sorted(data, key=lambda r: r.dmg_state.lsi):
        damage = Node('damage',
                      dict(ds=row.dmg_state.dmg_state,
                           mean=row.mean, stddev=row.stddev))
        total.append(damage)
    return total

def dmg_dist_per_asset_node(self, data):
    """
    :param data: a sequence of records with attributes .exposure_data,
                 .mean and .stddev
    :returns: a `dmgDistPerAsset` node
    """
    node = Node('dmgDistPerAsset', nodes=[self.dmg_states])
    data_by_location = groupby(data, lambda r: r.exposure_data.site)
    for loc in data_by_location:
        dd = Node('DDNode', nodes=[self.point_node(loc)])
        data_by_asset = groupby(
            data_by_location[loc],
            lambda r: r.exposure_data.asset_ref,
            lambda rows: [(r.mean, r.stddev) for r in rows])
        for asset_ref, data in data_by_asset.items():
            means, stddevs = zip(*data)
            dd.append(self.asset_node(asset_ref, means, stddevs))
        node.append(dd)
    return node

def collapse_map_node(self, data):
    """
    :param data: a sequence of records with attributes .exposure_data,
                 .mean and .stddev
    :returns: a `collapseMap` node
    """
    node = Node('collapseMap')
    data_by_location = groupby(data, lambda r: r.exposure_data.site)
    for loc in data_by_location:
        asset_refs = []
        means = []
        stddevs = []
        for row in sorted(data_by_location[loc],
                          key=lambda r: r.exposure_data.asset_ref):
            asset_refs.append(row.exposure_data.asset_ref)
            means.append(row.mean)
            stddevs.append(row.stddev)
        node.append(self.cm_node(loc, asset_refs, means, stddevs))
    return node

def df_to_tree(tree_df, validate=True, omit=None, sub=None):
    '''
    Converts a logic tree :class:`pandas.DataFrame` to a tree of
    :class:`openquake.baselib.node.Node` objects, which can then be
    written to a file using :func:`openquake.hazardlib.nrml.write`.
    '''
    tree = Node('logicTree', {'logicTreeID': 'lt1'}, None)
    for i, level in tree_df.iterrows():
        branching_level_attr = {'branchingLevelID': 'bl%d' % (i + 1)}
        branching_level = Node('logicTreeBranchingLevel',
                               branching_level_attr, None)
        branch_set_attr = {
            'branchSetID': 'bs%d' % (i + 1),
            'uncertaintyType': level['uncertaintyType']}
        for key in level.keys():
            if 'applyTo' in key and level[key] != 'all':
                branch_set_attr.update({key: level[key]})
        if 'uncertaintyWeight' in level.keys():
            weights = level['uncertaintyWeight']
        else:
            weights = None
        models_weights = models_with_weights(
            level['uncertaintyType'], level['uncertaintyModel'],
            weights, branch_set_attr['branchSetID'],
            validate=validate, omit=omit, sub=sub)
        if not models_weights:
            continue
        add_branch_set(branching_level, branch_set_attr, models_weights)
        tree.append(branching_level)
    return tree

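# Hedged sketch of the input shape df_to_tree expects: one DataFrame row
# per branching level, with the column names used in the function body
# above. The models_with_weights helper (not shown here) is assumed to
# accept these values; the file name is a placeholder.
import pandas as pd

tree_df = pd.DataFrame([
    {'uncertaintyType': 'sourceModel',
     'uncertaintyModel': 'source_model.xml',
     'uncertaintyWeight': 1.0},
])
tree = df_to_tree(tree_df, validate=False)
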
def build_multi_mfd(mfd):
    """
    Convert a MultiMFD instance into a Node

    :param mfd: MFD as instance of
        :class:`openquake.hazardlib.mfd.multi_mfd.MultiMFD`
    :returns: Instance of :class:`openquake.baselib.node.Node`
    """
    node = Node("multiMFD", dict(kind=mfd.kind))
    for name in sorted(mfd.kwargs):
        values = mfd.kwargs[name]
        if name in ('magnitudes', 'occurRates'):
            values = sum(values, [])
        node.append(Node(name, text=values))
    if 'occurRates' in mfd.kwargs:
        lengths = [len(rates) for rates in mfd.kwargs['occurRates']]
        node.append(Node('lengths', text=lengths))
    return node

def write(self, destination, source_model, name=None):
    """
    Exports to NRML
    """
    if os.path.exists(destination):
        os.remove(destination)
    self.destination = destination
    if name:
        source_model.name = name
    output_source_model = Node("sourceModel", {"name": name})
    dic = groupby(source_model.sources,
                  operator.itemgetter('tectonicRegion'))
    for i, (trt, srcs) in enumerate(dic.items(), 1):
        output_source_model.append(
            Node('sourceGroup',
                 {'tectonicRegion': trt, 'name': 'group %d' % i},
                 nodes=srcs))
    print("Exporting Source Model to %s" % self.destination)
    with open(self.destination, "wb") as f:
        nrml.write([output_source_model], f, "%s")

def build_multi_mfd(mfd):
    """
    Convert a MultiMFD instance into a Node

    :param mfd: MFD as instance of
        :class:`openquake.hazardlib.mfd.multi_mfd.MultiMFD`
    :returns: Instance of :class:`openquake.baselib.node.Node`
    """
    node = Node("multiMFD", dict(kind=mfd.kind, size=mfd.size))
    for name in sorted(mfd.kwargs):
        values = mfd.kwargs[name]
        if name in ('magnitudes', 'occurRates'):
            values = sum(values, [])
        node.append(Node(name, text=values))
    if 'occurRates' in mfd.kwargs:
        lengths = [len(rates) for rates in mfd.kwargs['occurRates']]
        node.append(Node('lengths', text=lengths))
    return node

def export_site_model(ekey, dstore):
    dest = dstore.export_path('site_model.xml')
    site_model_node = Node('siteModel')
    hdffields = 'lons lats vs30 vs30measured z1pt0 z2pt5'.split()
    xmlfields = 'lon lat vs30 vs30Type z1pt0 z2pt5'.split()
    recs = [tuple(rec[f] for f in hdffields)
            for rec in dstore['sitecol'].array]
    unique_recs = sorted(set(recs))
    for rec in unique_recs:
        n = Node('site')
        for f, hdffield in enumerate(hdffields):
            xmlfield = xmlfields[f]
            if hdffield == 'vs30measured':
                value = 'measured' if rec[f] else 'inferred'
            else:
                value = rec[f]
            n[xmlfield] = value
        site_model_node.append(n)
    with open(dest, 'wb') as f:
        nrml.write([site_model_node], f)
    return [dest]

def export_site_model(ekey, dstore):
    dest = dstore.export_path('site_model.xml')
    site_model_node = Node('siteModel')
    hdf2xml = dict(lons='lon', lats='lat', depths='depth',
                   vs30measured='vs30Type')
    for rec in dstore['sitecol'].array:
        n = Node('site')
        for hdffield in rec.dtype.names:
            if hdffield == 'sids':  # skip
                continue
            elif hdffield == 'depth' and rec[hdffield] == 0:
                continue
            xmlfield = hdf2xml.get(hdffield, hdffield)
            if hdffield == 'vs30measured':
                value = 'measured' if rec[hdffield] else 'inferred'
            else:
                value = rec[hdffield]
            n[xmlfield] = value
        site_model_node.append(n)
    with open(dest, 'wb') as f:
        nrml.write([site_model_node], f)
    return [dest]

def complex_fault_node(edges):
    """
    :param edges: a list of lists of points
    :returns: a Node of kind complexFaultGeometry
    """
    node = Node('complexFaultGeometry')
    node.append(edge_node('faultTopEdge', edges[0]))
    for edge in edges[1:-1]:
        node.append(edge_node('intermediateEdge', edge))
    node.append(edge_node('faultBottomEdge', edges[-1]))
    return node

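# Hedged usage sketch: complex_fault_node needs a top edge, optional
# intermediate edges and a bottom edge; the edge_node helper (not shown
# here) is assumed to accept Line objects from openquake.hazardlib.geo.
from openquake.hazardlib.geo import Line, Point

top = Line([Point(0.0, 0.0, 0.0), Point(1.0, 0.0, 0.0)])
bottom = Line([Point(0.0, 0.0, 10.0), Point(1.0, 0.0, 10.0)])
geom = complex_fault_node([top, bottom])  # no intermediate edges here
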
def simple_fault_node(fault_trace, dip, upper_depth, lower_depth):
    """
    :param fault_trace: an object with an attribute .points
    :param dip: dip parameter
    :param upper_depth: upper seismogenic depth
    :param lower_depth: lower seismogenic depth
    :returns: a Node of kind simpleFaultGeometry
    """
    node = Node('simpleFaultGeometry')
    line = []
    for p in fault_trace.points:
        line.append(p.longitude)
        line.append(p.latitude)
    node.append(Node('gml:LineString',
                     nodes=[Node('gml:posList', {}, line)]))
    node.append(Node('dip', {}, dip))
    node.append(Node('upperSeismoDepth', {}, upper_depth))
    node.append(Node('lowerSeismoDepth', {}, lower_depth))
    return node

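# A minimal sketch for simple_fault_node: any object whose .points carry
# .longitude and .latitude works; a hazardlib Line is assumed here and the
# geometry values are illustrative.
from openquake.hazardlib.geo import Line, Point

trace = Line([Point(0.0, 0.0), Point(1.0, 0.0)])  # ~111 km along the equator
geom = simple_fault_node(trace, dip=45.0, upper_depth=0.0, lower_depth=10.0)
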
def add_branch_set(branching_level, branch_set_attr, models_weights):
    '''
    Add a branch set to a branching level.
    '''
    branch_set = Node('logicTreeBranchSet', branch_set_attr, None)
    branch_index_string = re.sub('[^0-9]', '',
                                 branch_set_attr['branchSetID'])
    if branch_index_string:
        branch_index = int(branch_index_string)
    else:
        branch_index = 999
    for j, (model, weight) in enumerate(models_weights):
        branch_attr = {'branchID': 'b%dm%d' % (branch_index, j + 1)}
        branch = Node('logicTreeBranch', branch_attr, None)
        branch.append(Node('uncertaintyModel', {}, model))
        branch.append(Node('uncertaintyWeight', {}, weight))
        branch_set.append(branch)
    branching_level.append(branch_set)

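# Hedged usage sketch for add_branch_set; the GMPE names and the 50/50
# weights below are placeholders for illustration, not a recommendation.
branching_level = Node('logicTreeBranchingLevel',
                       {'branchingLevelID': 'bl1'}, None)
add_branch_set(branching_level,
               {'branchSetID': 'bs1', 'uncertaintyType': 'gmpeModel'},
               [('BooreAtkinson2008', 0.5), ('ChiouYoungs2008', 0.5)])
# branching_level now holds one logicTreeBranchSet with two branches
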
def _pointsources2multipoints(srcs, i):
    # converts pointSources with the same hpdist, npdist and msr into a
    # single multiPointSource
    allsources = []
    for (hd, npd, msr), sources in groupby(srcs, dists).items():
        if len(sources) == 1:  # there is a single source
            allsources.extend(sources)
            continue
        mfds = [src[3] for src in sources]
        points = []
        usd = []
        lsd = []
        rar = []
        for src in sources:
            pg = src.pointGeometry
            points.extend(~pg.Point.pos)
            usd.append(~pg.upperSeismoDepth)
            lsd.append(~pg.lowerSeismoDepth)
            rar.append(~src.ruptAspectRatio)
        geom = Node('multiPointGeometry')
        geom.append(Node('gml:posList', text=points))
        geom.append(Node('upperSeismoDepth', text=collapse(usd)))
        geom.append(Node('lowerSeismoDepth', text=collapse(lsd)))
        node = Node(
            'multiPointSource',
            dict(id='mps-%d' % i, name='multiPointSource-%d' % i),
            nodes=[geom])
        node.append(Node("magScaleRel", text=collapse(msr)))
        node.append(Node("ruptAspectRatio", text=collapse(rar)))
        node.append(mfds2multimfd(mfds))
        node.append(Node('nodalPlaneDist', nodes=[
            Node('nodalPlane', dict(probability=prob, rake=rake,
                                    strike=strike, dip=dip))
            for prob, rake, strike, dip in npd]))
        node.append(Node('hypoDepthDist', nodes=[
            Node('hypoDepth', dict(depth=depth, probability=prob))
            for prob, depth in hd]))
        allsources.append(node)
        i += 1
    return i, allsources

def convert_fragility_model_04(node, fname, fmcounter=itertools.count(1)):
    """
    :param node: an :class:`openquake.commonlib.node.Node` in NRML 0.4
    :param fname: path of the fragility file
    :returns: an :class:`openquake.commonlib.node.Node` in NRML 0.5
    """
    convert_type = {"lognormal": "logncdf"}
    new = Node('fragilityModel',
               dict(assetCategory='building',
                    lossCategory='structural',
                    id='fm_%d_converted_from_NRML_04' % next(fmcounter)))
    with context(fname, node):
        fmt = node['format']
        descr = ~node.description
        limit_states = ~node.limitStates
    new.append(Node('description', {}, descr))
    new.append(Node('limitStates', {}, ' '.join(limit_states)))
    for ffs in node[2:]:
        IML = ffs.IML
        # NB: noDamageLimit = None is different from zero
        nodamage = ffs.attrib.get('noDamageLimit')
        ff = Node('fragilityFunction', {'format': fmt})
        ff['id'] = ~ffs.taxonomy
        ff['shape'] = convert_type[ffs.attrib.get('type', 'lognormal')]
        if fmt == 'continuous':
            with context(fname, IML):
                attr = dict(imt=IML['IMT'],
                            minIML=IML['minIML'],
                            maxIML=IML['maxIML'])
            if nodamage is not None:
                attr['noDamageLimit'] = nodamage
            ff.append(Node('imls', attr))
            for ffc in ffs[2:]:
                with context(fname, ffc):
                    ls = ffc['ls']
                    param = ffc.params
                with context(fname, param):
                    m, s = param['mean'], param['stddev']
                ff.append(Node('params', dict(ls=ls, mean=m, stddev=s)))
        else:  # discrete
            with context(fname, IML):
                imls = ' '.join(map(str, (~IML)[1]))
                attr = dict(imt=IML['IMT'])
            if nodamage is not None:
                attr['noDamageLimit'] = nodamage
            ff.append(Node('imls', attr, imls))
            for ffd in ffs[2:]:
                ls = ffd['ls']
                with context(fname, ffd):
                    poes = ' '.join(map(str, ~ffd.poEs))
                ff.append(Node('poes', dict(ls=ls), poes))
        new.append(ff)
    return new

def _pointsources2multipoints(srcs, i):
    allsources = []
    for key, sources in groupby(srcs, get_key).items():
        if len(sources) == 1:  # there is a single source
            allsources.extend(sources)
            continue
        msr, rar, usd, lsd, hd, npd = key
        mfds = [src[3] for src in sources]
        points = []
        for src in sources:
            points.extend(~src.pointGeometry.Point.pos)
        geom = Node('multiPointGeometry')
        geom.append(Node('gml:posList', text=points))
        geom.append(Node('upperSeismoDepth', text=usd))
        geom.append(Node('lowerSeismoDepth', text=lsd))
        node = Node(
            'multiPointSource',
            dict(id='mps-%d' % i, name='multiPointSource-%d' % i),
            nodes=[geom])
        node.append(Node("magScaleRel", text=msr))
        node.append(Node("ruptAspectRatio", text=rar))
        node.append(mfds2multimfd(mfds))
        node.append(Node('nodalPlaneDist', nodes=[
            Node('nodalPlane', dict(probability=prob, rake=rake,
                                    strike=strike, dip=dip))
            for prob, rake, strike, dip in npd]))
        node.append(Node('hypoDepthDist', nodes=[
            Node('hypoDepth', dict(depth=depth, probability=prob))
            for prob, depth in hd]))
        allsources.append(node)
        i += 1
    return i, allsources
