def get_mfds(mvect, mxvect, tvect, dec_tvect, ev_dict, mcomps, ycomps, ymax, mrng, src_mmax, \
             src_mmin_reg, src_bval_fix, src_bval_fix_sd, bin_width, poly):
    """Estimate Gutenberg-Richter MFD parameters for one source zone.

    The b-value estimator is selected by catalogue size after completeness
    filtering:
      * src_bval_fix > 0        -> use the supplied fixed b-value
      * 50 <= len(mvect) < 80   -> Aki maximum likelihood
      * len(mvect) >= 80        -> Weichert algorithm
      * otherwise               -> look up the NSHA13 background-zone b-value
                                   containing the polygon centroid (0.85 if
                                   the centroid falls outside all zones)

    Parameters (assumed shapes - confirm against callers):
      mvect/mxvect  : preferred / original event magnitudes (arrays)
      tvect/dec_tvect : event times (datetime / decimal-year arrays)
      ev_dict       : per-event metadata dicts
      mcomps/ycomps : completeness magnitudes and corresponding years
      ymax          : end year of catalogue
      mrng          : magnitude bin centres
      src_mmax      : zone maximum magnitude
      src_mmin_reg  : minimum magnitude used for regression
      src_bval_fix / src_bval_fix_sd : fixed b-value and its sigma (<= 0 to disable)
      bin_width     : magnitude bin width
      poly          : zone polygon (shapely-compatible) - only used by the
                      NSHA13 background fallback branch

    Returns:
      (bval, beta, sigb, sigbeta, fn0, cum_rates, ev_out, err_up, err_lo)
    """
    # remove incomplete events based on original preferred magnitudes (mxvect)
    mvect, mxvect, tvect, dec_tvect, ev_dict, out_idx, ev_out = \
        remove_incomplete_events(mvect, mxvect, tvect, dec_tvect, ev_dict, mcomps, ycomps, bin_width)

    # get annualised rates using preferred MW (mvect)
    cum_rates, cum_num, bin_rates, n_obs, n_yrs = \
        get_annualised_rates(mcomps, ycomps, mvect, mrng, bin_width, ymax)

    ###############################################################################
    # calculate MFDs (estimator choice depends on event count - see docstring)
    ###############################################################################

    # get index of min reg mag and valid mag bins
    diff_cum = abs(hstack((diff(cum_rates), 0.)))
    midx = where((mrng >= src_mmin_reg - bin_width / 2.) & (isfinite(diff_cum)))[0]

    # check if length of midx = 0 and get highest non-zero mag
    if len(midx) == 0:
        midx = [where(isfinite(diff_cum))[0][-1]]

    # make sure there are at least 5 magnitude bins for b-value calculations;
    # step down through lower-magnitude bins that have observations until we do
    if len(midx) < 5:
        idxstart = midx[0] - 1
        while idxstart >= 0 and len(midx) < 5:
            # if num observations greater than zero, add to midx
            if n_obs[idxstart] > 0:
                midx = hstack((idxstart, midx))
                print '    get lower mag T', midx
            idxstart -= 1

    # first, check if using fixed bval and fit curve using to solve for N0
    if src_bval_fix > 0:
        print '    Using fixed b-value =', src_bval_fix, src_bval_fix_sd

        # set source beta
        bval = src_bval_fix
        beta = bval2beta(bval)
        sigb = src_bval_fix_sd
        # NOTE(review): converts the b-value *uncertainty* through bval2beta -
        # confirm this is the intended sigma propagation to beta space
        sigbeta = bval2beta(sigb)

        # get dummy curve
        dummyN0 = 1.
        m_min_reg = src_mmin_reg + bin_width / 2.
        bc_tmp, bc_mrng = get_oq_incrementalMFD(beta, dummyN0, m_min_reg, src_mmax, bin_width)

        # fit to lowest magnitude considered
        bc_lo100 = cum_rates[midx][0] * (bc_tmp / bc_tmp[0])

        # scale for N0
        fn0 = 10**(log10(bc_lo100[0]) + beta2bval(beta) * bc_mrng[0])

    # do Aki ML when 50 <= N events < 80
    elif len(mvect) >= 50 and len(mvect) < 80:
        # do Aki max likelihood
        bval, sigb = aki_maximum_likelihood(mrng[midx] + bin_width / 2, n_obs[midx], 0.)  # assume completeness taken care of
        beta = bval2beta(bval)
        sigbeta = bval2beta(sigb)

        # now recalc N0
        dummyN0 = 1.
        bc_tmp, bc_mrng = get_oq_incrementalMFD(beta, dummyN0, mrng[0], src_mmax, bin_width)

        # fit to lowest magnitude considered and observed
        Nminmag = cum_rates[midx][0] * (bc_tmp / bc_tmp[0])

        # !!!!!! check into why this must be done - I suspect it may be that there is an Mmax eq in the zones !!!!
        fidx = midx[0]

        # solve for N0
        fn0 = 10**(log10(Nminmag[0]) + bval * bc_mrng[fidx])

        print '    Aki ML b-value =', bval, sigb

    # do Weichert for zones with more events
    elif len(mvect) >= 80:
        # calculate weichert
        bval, sigb, a_m, siga_m, fn0, stdfn0 = weichert_algorithm(array(n_yrs[midx]), \
            mrng[midx]+bin_width/2, n_obs[midx], mrate=0.0, \
            bval=1.1, itstab=1E-4, maxiter=1000)

        beta = bval2beta(bval)
        sigbeta = bval2beta(sigb)

        print '    Weichert b-value = ', bval, sigb

    ###############################################################################
    # fall back to the NSHA13_Background zone b-value for sparse catalogues
    ###############################################################################
    else:
        print 'Getting b-value from NSHA Background...'

        # set B-value to nan so we can detect "centroid outside all zones" below
        bval = nan

        # load Leonard zones
        lsf = shapefile.Reader(path.join('shapefiles', 'NSHA13_Background', 'NSHA13_Background_NSHA18_MFD.shp'))

        # get Leonard polygons
        l08_shapes = lsf.shapes()

        # get Leonard b-values
        lbval = get_field_data(lsf, 'BVAL_BEST', 'str')

        # get centroid of current poly
        clon, clat = get_shapely_centroid(poly)
        point = Point(clon, clat)

        # loop through zones and find point in poly
        for zone_bval, l_shape in zip(lbval, l08_shapes):
            l_poly = Polygon(l_shape.points)

            # check if leonard centroid in domains poly
            if point.within(l_poly):
                bval = float(zone_bval)

        # for those odd sites outside of L08 bounds, assign default b-value
        if isnan(bval):
            bval = 0.85

        beta = bval2beta(bval)
        sigb = 0.1
        sigbeta = bval2beta(sigb)

        # solve for N0
        fn0 = fit_a_value(bval, mrng, cum_rates, src_mmax, bin_width, midx)

        print '    Leonard2008 b-value =', bval, sigb

    # get confidence intervals on the cumulative rates
    err_up, err_lo = get_confidence_intervals(n_obs, cum_rates)

    return bval, beta, sigb, sigbeta, fn0, cum_rates, ev_out, err_up, err_lo
def write_oq_sourcefile(model, modelpath, logicpath, multimods): """ model = a list of dictionaries for each area source modelpath = folder for sources to be included in source_model_logic_tree.xml logicpath = folder for logic tree multimods = argv[2] # for setting weights of alternative models (True or False) """ from oq_tools import beta2bval, get_line_parallels from numpy import array, log10, max, min, tan, radians, unique, isinf from os import path # set big bbox params bbmaxlon = -180 bbmaxlat = -90 bbminlon = 180 bbminlat = 90 # Write 1 model file betalist = ['bb', 'bl', 'bu'] maglist = ['mb', 'ml', 'mu'] srcxmls = [] # make xml header header = '<?xml version="1.0" encoding="utf-8"?>\n' header += '<nrml xmlns:gml="http://www.opengis.net/gml"\n' header += ' xmlns="http://openquake.org/xmlns/nrml/0.4">\n\n' # set wieghts bval_wt = [0.68, 0.16, 0.16] max_mag_wt = [0.60, 0.30, 0.10] branch_wt = [] outbase = path.split(modelpath)[-1] # start xml text newxml = header + ' <sourceModel name="' + outbase + '_collapsed">\n\n' # get src codes and rename if duplicated codes = [] for m in model: codes.append(m['src_code']) ucodes = unique(codes) # start loop thru area sources for m in model: ####################################################################### # write area sources ####################################################################### if m['src_type'] == 'area': print m['src_type'] # rename source code if "." 
exists m['src_code'].replace('.', '') newxml += ' <areaSource id="'+m['src_code']+'" name="'+\ m['src_name']+'" tectonicRegion="'+m['trt']+'">\n' newxml += ' <areaGeometry>\n' newxml += ' <gml:Polygon>\n' newxml += ' <gml:exterior>\n' newxml += ' <gml:LinearRing>\n' newxml += ' <gml:posList>\n' # get polygon text polytxt = '' for xy in m['src_shape'][:-1]: # no need to close poly polytxt = polytxt + ' ' + str("%0.4f" % xy[0]) \ + ' ' + str("%0.4f" % xy[1]) + '\n' newxml += polytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LinearRing>\n' newxml += ' </gml:exterior>\n' newxml += ' </gml:Polygon>\n' ################################################################### # print model bbox of model # this is not required for the nrml files, but useful for setting up job.ini files buff = 0.1 maxlon = max(m['src_shape'][:, 0]) + buff minlon = min(m['src_shape'][:, 0]) - buff maxlat = max(m['src_shape'][:, 1]) + buff minlat = min(m['src_shape'][:, 1]) - buff # get big bbox if maxlon > bbmaxlon: bbmaxlon = maxlon if minlon < bbminlon: bbminlon = minlon if maxlat > bbmaxlat: bbmaxlat = maxlat if minlat < bbminlat: bbminlat = minlat print m[ 'src_code'], minlon, minlat, ',', minlon, maxlat, ',', maxlon, maxlat, ',', maxlon, minlat ################################################################### # set depth distribution if min(m['src_dep']) != max(m['src_dep']): newxml += ' <upperSeismoDepth>' + str( "%0.1f" % min(m['src_dep'])) + '</upperSeismoDepth>\n' newxml += ' <lowerSeismoDepth>' + str( "%0.1f" % max(m['src_dep'])) + '</lowerSeismoDepth>\n' else: newxml += ' <upperSeismoDepth>' + str( "%0.1f" % (min(m['src_dep']) - 10)) + '</upperSeismoDepth>\n' newxml += ' <lowerSeismoDepth>' + str( "%0.1f" % (min(m['src_dep']) + 10)) + '</lowerSeismoDepth>\n' newxml += ' </areaGeometry>\n' newxml += ' <magScaleRel>WC1994</magScaleRel>\n' newxml += ' <ruptAspectRatio>2.0</ruptAspectRatio>\n' # get weighted rates binwid = 0.1 octxt = make_collapse_occurrence_text(m, binwid) newxml 
+= ' <incrementalMFD minMag="' + str( '%0.2f' % (m['min_mag'] + 0.5 * binwid)) + '" binWidth="' + str(binwid) + '">\n' newxml += ' <occurRates>' + octxt + '</occurRates>\n' newxml += ' </incrementalMFD>\n' """ # set GR recurrence pars tmpN0 = m['src_N0'][i] tmpbeta = m['src_beta'][i] tmpmmax = m['max_mag'][j] grtxt = ''.join((' <truncGutenbergRichterMFD aValue="', \ str("%0.4f" % log10(tmpN0)),'" bValue="', \ str("%0.4f" % beta2bval(tmpbeta)),'" minMag="', \ str("%0.2f" % m['min_mag']),'" maxMag="', \ str("%0.2f" % tmpmmax),'"/>\n')) newxml += grtxt """ # set nodal planes newxml += ' <nodalPlaneDist>\n' newxml += ' <nodalPlane probability="0.125" strike="0.0" dip="90.0" rake="0.0" />\n' newxml += ' <nodalPlane probability="0.125" strike="45.0" dip="90.0" rake="0.0" />\n' newxml += ' <nodalPlane probability="0.125" strike="90.0" dip="90.0" rake="0.0" />\n' newxml += ' <nodalPlane probability="0.125" strike="135.0" dip="90.0" rake="0.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="0.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="45.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="90.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="135.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="180.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="225.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="270.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.0625" strike="315.0" dip="30.0" rake="90.0" />\n' newxml += ' </nodalPlaneDist>\n' # set hypo depth newxml += ' <hypoDepthDist>\n' newxml += ' <hypoDepth probability="0.50" depth="'+str("%0.1f" % m['src_dep'][0])+'"/>\n' \ +' <hypoDepth probability="0.25" depth="'+str("%0.1f" % m['src_dep'][1])+'"/>\n' \ +' <hypoDepth probability="0.25" depth="'+str("%0.1f" % m['src_dep'][2])+'"/>\n' 
newxml += ' </hypoDepthDist>\n' newxml += ' </areaSource>\n\n' ####################################################################### # now make fault sources ####################################################################### elif m['src_type'] == 'fault': # rename source code if "." exists m['src_code'].replace('.', '') if isinf(log10(m['src_N0'][0])) == False: ################################################################### # do complex faults ################################################################### if m['fault_dip'][0] != m['fault_dip'][1]: #if m['fault_dip'][0] >= 0: # catches all faults #if m['fault_dip'][0] > 0: # id subcript idsub = str("%0.1f" % beta2bval(m['src_beta'][0])) idsub = idsub.replace(".", "") newxml += ' <complexFaultSource id="'+src_code+idsub+'" name="'+\ m['src_name']+'" tectonicRegion="'+m['trt']+'">\n' newxml += ' <complexFaultGeometry>\n' newxml += ' <faultTopEdge>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # calculate lat lons from surface projection # get upper h-dist upperhdist = m['src_dep'][0] / tan( radians(m['fault_dip'][0])) upperxy = get_line_parallels(m['src_shape'], upperhdist)[0] # make upper text xytxt = '' for xy in upperxy: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1]), str(m['src_dep'][0])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' </faultTopEdge>\n' newxml += ' <intermediateEdge>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # calculate lat lons from upper edge # get intermediate h-dist interhdist = (m['src_dep'][1] - m['src_dep'][0]) / tan( radians(m['fault_dip'][0])) interxy = get_line_parallels(upperxy, interhdist)[0] # make intermediate text xytxt = '' for xy in interxy: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1]), str(m['src_dep'][1])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' </intermediateEdge>\n' 
newxml += ' <faultBottomEdge>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # calculate lat lons from intermediate edge # get bottom h-dist bottomhdist = (m['src_dep'][2] - m['src_dep'][1]) / tan( radians(m['fault_dip'][1])) bottomxy = get_line_parallels(interxy, bottomhdist)[0] # make bottom text xytxt = '' for xy in bottomxy: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1]), str(m['src_dep'][2])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' </faultBottomEdge>\n' newxml += ' </complexFaultGeometry>\n' ''' # get fault area scaling model ''' #src_code = m['src_code'] if src_code.startswith('CIS'): newxml += ' <magScaleRel>GSCCascadia</magScaleRel>\n' elif src_code.startswith('WIN'): newxml += ' <magScaleRel>GSCOffshoreThrustsWIN</magScaleRel>\n' elif src_code.startswith('HGT'): newxml += ' <magScaleRel>GSCOffshoreThrustsHGT</magScaleRel>\n' elif src_code.startswith('QCSS') or src_code.startswith( 'FWF'): newxml += ' <magScaleRel>WC1994_QCSS</magScaleRel>\n' elif src_code.startswith('EISO'): newxml += ' <magScaleRel>GSCEISO</magScaleRel>\n' elif src_code.startswith('EISB'): newxml += ' <magScaleRel>GSCEISB</magScaleRel>\n' elif src_code.startswith('EISI'): newxml += ' <magScaleRel>GSCEISI</magScaleRel>\n' else: newxml += ' <magScaleRel>WC1994</magScaleRel>\n' newxml += ' <ruptAspectRatio>1.0</ruptAspectRatio>\n' ''' # now get appropriate MFD ''' # do incremental MFD if m['src_beta'][0] > -99: # adjust N0 value to account for weighting of fault sources octxt = make_collapse_occurrence_text(m, binwid) # make text newxml += ' <incrementalMFD minMag="' + str( '%0.2f' % (m['min_mag'] + 0.5 * binwid) ) + '" binWidth="' + str(binwid) + '">\n' newxml += ' <occurRates>' + octxt + '</occurRates>\n' newxml += ' </incrementalMFD>\n' if m['fault_dip'][0] != 90.: newxml += ' <rake>90.0</rake>\n' else: newxml += ' <rake>0.0</rake>\n' newxml += ' </complexFaultSource>\n\n' 
################################################################### # else do simple fault ################################################################### elif m['fault_dip'][0] == m['fault_dip'][1]: # id subcript idsub = str("%0.1f" % beta2bval(m['src_beta'][0])) idsub = idsub.replace(".", "") newxml += ' <simpleFaultSource id="'+m['src_code']+idsub+'" name="'+\ m['src_name']+'" tectonicRegion="'+m['trt']+'">\n' newxml += ' <simpleFaultGeometry>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # simple fauls use surface projection! ''' # calculate lat lons from surface projection # get upper h-dist upperhdist = m['src_dep'][0] / tan(radians(m['fault_dip'][0])) upperxy = get_line_parallels(m['src_shape'], upperhdist)[0] ''' xytxt = '' for xy in m['src_shape']: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' <dip>' + str( m['fault_dip'][0]) + '</dip>\n' newxml += ' <upperSeismoDepth>' + str( m['src_dep'][0]) + '</upperSeismoDepth>\n' newxml += ' <lowerSeismoDepth>' + str( m['src_dep'][-1]) + '</lowerSeismoDepth>\n' newxml += ' </simpleFaultGeometry>\n' ''' # get fault area scaling model ''' src_code = m['src_code'] if src_code == 'CIS': newxml += ' <magScaleRel>GSCCascadia</magScaleRel>\n' elif src_code.startswith('WIN'): newxml += ' <magScaleRel>GSCOffshoreThrustsWIN</magScaleRel>\n' elif src_code.startswith('HGT'): newxml += ' <magScaleRel>GSCOffshoreThrustsHGT</magScaleRel>\n' elif src_code.startswith('QCSS') or src_code.startswith( 'FWF'): newxml += ' <magScaleRel>WC1994_QCSS</magScaleRel>\n' elif src_code.startswith('EISO'): newxml += ' <magScaleRel>GSCEISO</magScaleRel>\n' elif src_code.startswith('EISB'): newxml += ' <magScaleRel>GSCEISB</magScaleRel>\n' elif src_code.startswith('EISI'): newxml += ' <magScaleRel>GSCEISI</magScaleRel>\n' else: newxml += ' <magScaleRel>WC1994</magScaleRel>\n' newxml += ' 
<ruptAspectRatio>1.0</ruptAspectRatio>\n' #newxml += ' <ruptAspectRatio>2.0</ruptAspectRatio>\n' ''' # now get appropriate MFD ''' # do incremental MFD if m['src_beta'][0] > -99: octxt = make_collapse_occurrence_text(m, binwid) # make text newxml += ' <incrementalMFD minMag="' + str( '%0.2f' % (m['min_mag'] + 0.5 * binwid) ) + '" binWidth="' + str(binwid) + '">\n' newxml += ' <occurRates>' + octxt + '</occurRates>\n' newxml += ' </incrementalMFD>\n' if m['fault_dip'][0] != 90.: newxml += ' <rake>90.0</rake>\n' else: newxml += ' <rake>0.0</rake>\n' newxml += ' </simpleFaultSource>\n\n' # finish nrml newxml += ' </sourceModel>\n' newxml += '</nrml>' # write Big BBOX print '\nBBOX:', bbminlon, bbminlat, ',', bbminlon, bbmaxlat, ',', bbmaxlon, bbmaxlat, ',', bbmaxlon, bbminlat # write new data to file outxml = path.join(modelpath, ''.join( (outbase, '_collapsed_rates_FF.xml'))) #outxml = '/'.join((src_folder, ''.join((outbase,'_',bl,'_',ml,'.xml')))) f = open(outxml, 'w') f.write(newxml) f.close() srcxmls.append(outxml) ###################################################################### # now that the source file have been written, make the logic tree file ###################################################################### # if multimodel - adjust weights if multimods == 'True': branch_wt = array(branch_wt) branch_wt *= m['src_reg_wt'] print 'Branch Weights: ', m['src_reg_wt'] #else: # full_wt = concatenate((branch_wt, branch_wt, branch_wt)) newxml = '<?xml version="1.0" encoding="UTF-8"?>\n' newxml += '<nrml xmlns:gml="http://www.opengis.net/gml"\n' newxml += ' xmlns="http://openquake.org/xmlns/nrml/0.4">\n\n' newxml += ' <logicTree logicTreeID="lt1">\n' newxml += ' <logicTreeBranchingLevel branchingLevelID="bl1">\n' newxml += ' <logicTreeBranchSet uncertaintyType="sourceModel"\n' \ ' branchSetID="bs1">\n\n' # make branches for i, branch in enumerate(srcxmls): logictreepath = logicpath + '/' + path.split(branch)[-1] newxml += ' <logicTreeBranch branchID="b' + 
str( i + 1) + '">\n' newxml += ' <uncertaintyModel>' + logictreepath + '</uncertaintyModel>\n' newxml += ' <uncertaintyWeight>' + str( m['src_reg_wt']) + '</uncertaintyWeight>\n' newxml += ' </logicTreeBranch>\n\n' newxml += ' </logicTreeBranchSet>\n' newxml += ' </logicTreeBranchingLevel>\n' newxml += ' </logicTree>\n' newxml += '</nrml>' # write logic tree to file outxml = path.join(logicpath, ''.join( (outbase, '_source_model_logic_tree_FF.xml'))) f = open(outxml, 'w') f.write(newxml) f.close()
def get_mfds(mvect, mxvect, tvect, dec_tvect, ev_dict, mcomps, ycomps, ymax, mrng, src_mmax, \
             src_mmin_reg, src_bval_fix, src_bval_fix_sd, bin_width, poly):
    """Estimate Gutenberg-Richter MFD parameters for one source zone.

    The b-value estimator is selected by catalogue size after completeness
    filtering:
      * src_bval_fix > 0        -> use the supplied fixed b-value
      * 30 <= len(mvect) < 80   -> Aki maximum likelihood
      * len(mvect) >= 80        -> Weichert algorithm
      * otherwise               -> default b-value of 1.0 (sigma 0.1)

    NOTE(review): 'poly' is retained for signature compatibility but is unused
    in this version (the shapefile-lookup fallback was replaced by b = 1.0).

    Returns:
      (bval, beta, sigb, sigbeta, fn0, cum_rates, ev_out, err_up, err_lo)
    """
    # remove incomplete events based on original preferred magnitudes (mxvect)
    mvect, mxvect, tvect, dec_tvect, ev_dict, out_idx, ev_out = \
        remove_incomplete_events(mvect, mxvect, tvect, dec_tvect, ev_dict, mcomps, ycomps, bin_width)

    # get annualised rates using preferred MW (mvect)
    cum_rates, cum_num, bin_rates, n_obs, n_yrs = \
        get_annualised_rates(mcomps, ycomps, mvect, mrng, bin_width, ymax)

    print('    Number of events:', len(mvect))
    #print(cum_rates

    ###############################################################################
    # calculate MFDs (estimator choice depends on event count - see docstring)
    ###############################################################################

    # get index of min reg mag and valid mag bins
    diff_cum = abs(hstack((diff(cum_rates), 0.)))
    midx = where((mrng >= src_mmin_reg - bin_width / 2.) & (isfinite(diff_cum)))[0]

    # check if length of midx = 0 and get highest non-zero mag
    if len(midx) == 0:
        midx = [where(isfinite(diff_cum))[0][-1]]

    # make sure there are at least 5 magnitude bins for b-value calculations;
    # step down through lower-magnitude bins that have observations until we do
    if len(midx) < 5:
        idxstart = midx[0] - 1
        while idxstart >= 0 and len(midx) < 5:
            # if num observations greater than zero, add to midx
            if n_obs[idxstart] > 0:
                midx = hstack((idxstart, midx))
                print('    get lower mag T', midx)
            idxstart -= 1

    # first, check if using fixed bval and fit curve using to solve for N0
    if src_bval_fix > 0:
        print('    Using fixed b-value =', src_bval_fix, src_bval_fix_sd)

        # set source beta
        bval = src_bval_fix
        beta = bval2beta(bval)
        sigb = src_bval_fix_sd
        # NOTE(review): converts the b-value *uncertainty* through bval2beta -
        # confirm this is the intended sigma propagation to beta space
        sigbeta = bval2beta(sigb)

        # get dummy curve
        dummyN0 = 1.
        m_min_reg = src_mmin_reg + bin_width / 2.
        bc_tmp, bc_mrng = get_oq_incrementalMFD(beta, dummyN0, m_min_reg, src_mmax, bin_width)

        # fit to lowest magnitude considered
        bc_lo100 = cum_rates[midx][0] * (bc_tmp / bc_tmp[0])

        # scale for N0
        fn0 = 10**(log10(bc_lo100[0]) + beta2bval(beta) * bc_mrng[0])

    # do Aki ML when 30 <= N events < 80
    elif len(mvect) >= 30 and len(mvect) < 80:
        # do Aki max likelihood
        bval, sigb = aki_maximum_likelihood(mrng[midx] + bin_width / 2, n_obs[midx], 0.)  # assume completeness taken care of
        beta = bval2beta(bval)
        sigbeta = bval2beta(sigb)

        # now recalc N0
        dummyN0 = 1.
        bc_tmp, bc_mrng = get_oq_incrementalMFD(beta, dummyN0, mrng[0], src_mmax, bin_width)

        # fit to lowest magnitude considered and observed
        Nminmag = cum_rates[midx][0] * (bc_tmp / bc_tmp[0])

        # !!!!!! check into why this must be done - I suspect it may be that there is an Mmax eq in the zones !!!!
        fidx = midx[0]

        # solve for N0
        fn0 = 10**(log10(Nminmag[0]) + bval * bc_mrng[fidx])

        print('    Aki ML b-value =', bval, sigb)

    # do Weichert for zones with more events
    elif len(mvect) >= 80:
        # calculate weichert
        bval, sigb, a_m, siga_m, fn0, stdfn0 = weichert_algorithm(array(n_yrs[midx]), \
            mrng[midx]+bin_width/2, n_obs[midx], mrate=0.0, \
            bval=1.1, itstab=1E-4, maxiter=1000)

        beta = bval2beta(bval)
        sigbeta = bval2beta(sigb)

        print('    Weichert b-value =', str('%0.3f' % bval), str('%0.3f' % sigb))

    ###############################################################################
    # fall back to a default b-value of 1.0 for sparse catalogues (< 30 events)
    ###############################################################################
    else:
        print('Setting b-value to 1.0...')
        bval = 1.0
        beta = bval2beta(bval)
        sigb = 0.1
        sigbeta = bval2beta(sigb)

        # solve for N0
        fn0 = fit_a_value(bval, mrng, cum_rates, src_mmax, bin_width, midx)

        print('    Automatic b-value =', bval, sigb)

    ###############################################################################
    # get confidence intervals
    ###############################################################################
    err_up, err_lo = get_confidence_intervals(n_obs, cum_rates)

    return bval, beta, sigb, sigbeta, fn0, cum_rates, ev_out, err_up, err_lo
def write_oq_sourcefile(model, meta, mx_dict): """ model = a list of dictionaries for each area source modelpath = folder for sources to be included in source_model_logic_tree.xml logicpath = folder for logic tree multimods = argv[2] # for setting weights of alternative models (True or False) meta = True gives weight of 1 to best Mmax and b-value """ from oq_tools import beta2bval, get_line_parallels from numpy import log10, max, min, tan, radians, isinf from os import path # set big bbox params bbmaxlon = -180 bbmaxlat = -90 bbminlon = 180 bbminlat = 90 # make xml header header = '<?xml version="1.0" encoding="utf-8"?>\n' header += '<nrml xmlns:gml="http://www.opengis.net/gml"\n' header += ' xmlns="http://openquake.org/xmlns/nrml/0.4">\n\n' # set wieghts bval_wt = [0.68, 0.16, 0.16] max_mag_wt = [0.60, 0.10, 0.30] binwid = 0.1 # set rupture aspect ratio aspectratio = '1.5' # balance between L14 and Cea14 surface rupture lengths outbase = path.split(meta['modelPath'])[-1] # start xml text newxml = header + ' <sourceModel name="' + outbase + '_collapsed">\n\n' # get src codes and rename if duplicated codes = [] for m in model: codes.append(m['src_code']) #ucodes = unique(codes) # start loop thru area sources for m in model: print m['trt'] # set TRT if m['trt'] == 'active': trt = 'Active Shallow Crust' elif m['trt'] == 'stable': trt = 'Stable Shallow Crust' elif m['trt'] == 'interface': trt = 'Subduction Interface' elif m['trt'] == 'intraslab': trt = 'Subduction Intraslab' # comment out sources with null activitiy rates if m['src_N0'][-1] == -99.0: newxml += ' <!--\n' ####################################################################### # write area sources ####################################################################### if m['src_type'] == 'area': print m['src_type'] # rename source code if "." 
exists m['src_code'].replace('.', '') newxml += ' <areaSource id="'+m['src_code']+'" name="'+\ m['src_name']+'" tectonicRegion="'+trt+'">\n' newxml += ' <areaGeometry>\n' newxml += ' <gml:Polygon>\n' newxml += ' <gml:exterior>\n' newxml += ' <gml:LinearRing>\n' newxml += ' <gml:posList>\n' # get polygon text polytxt = '' for xy in m['src_shape'][:-1]: # no need to close poly polytxt = polytxt + ' ' + str("%0.4f" % xy[0]) \ + ' ' + str("%0.4f" % xy[1]) + '\n' newxml += polytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LinearRing>\n' newxml += ' </gml:exterior>\n' newxml += ' </gml:Polygon>\n' ################################################################### # print model bbox of model # this is not required for the nrml files, but useful for setting up job.ini files buff = 0.1 maxlon = max(m['src_shape'][:, 0]) + buff minlon = min(m['src_shape'][:, 0]) - buff maxlat = max(m['src_shape'][:, 1]) + buff minlat = min(m['src_shape'][:, 1]) - buff # get big bbox if maxlon > bbmaxlon: bbmaxlon = maxlon if minlon < bbminlon: bbminlon = minlon if maxlat > bbmaxlat: bbmaxlat = maxlat if minlat < bbminlat: bbminlat = minlat print m[ 'src_code'], minlon, minlat, ',', minlon, maxlat, ',', maxlon, maxlat, ',', maxlon, minlat ################################################################### # set depth distribution if min(m['src_dep']) != max(m['src_dep']): newxml += ' <upperSeismoDepth>' + str( "%0.1f" % (min(m['src_dep']) - 5.0)) + '</upperSeismoDepth>\n' newxml += ' <lowerSeismoDepth>' + str( "%0.1f" % (max(m['src_dep']) + 5.0)) + '</lowerSeismoDepth>\n' #newxml += ' <upperSeismoDepth>0.0</upperSeismoDepth>\n' #newxml += ' <lowerSeismoDepth>20.0</lowerSeismoDepth>\n' else: newxml += ' <upperSeismoDepth>' + str( "%0.1f" % (min(m['src_dep']) - 10)) + '</upperSeismoDepth>\n' newxml += ' <lowerSeismoDepth>' + str( "%0.1f" % (min(m['src_dep']) + 10)) + '</lowerSeismoDepth>\n' newxml += ' </areaGeometry>\n' newxml += ' <magScaleRel>WC1994</magScaleRel>\n' #newxml += ' 
<ruptAspectRatio>2.0</ruptAspectRatio>\n' newxml += ' <ruptAspectRatio>' + aspectratio + '</ruptAspectRatio>\n' # get weighted rates octxt = make_collapse_occurrence_text(m, binwid, meta, mx_dict) newxml += ' <incrementalMFD minMag="' + str( '%0.2f' % (m['min_mag'] + 0.5 * binwid)) + '" binWidth="' + str(binwid) + '">\n' newxml += ' <occurRates>' + octxt + '</occurRates>\n' newxml += ' </incrementalMFD>\n' """ # set GR recurrence pars tmpN0 = m['src_N0'][i] tmpbeta = m['src_beta'][i] tmpmmax = m['max_mag'][j] grtxt = ''.join((' <truncGutenbergRichterMFD aValue="', \ str("%0.4f" % log10(tmpN0)),'" bValue="', \ str("%0.4f" % beta2bval(tmpbeta)),'" minMag="', \ str("%0.2f" % m['min_mag']),'" maxMag="', \ str("%0.2f" % tmpmmax),'"/>\n')) newxml += grtxt """ # set nodal planes newxml += ' <nodalPlaneDist>\n' newxml += ' <nodalPlane probability="0.3" strike="0.0" dip="30.0" rake="90.0" />\n' # newxml += ' <nodalPlane probability="0.0625" strike="45.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.2" strike="90.0" dip="30.0" rake="90.0" />\n' # newxml += ' <nodalPlane probability="0.0625" strike="135.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.3" strike="180.0" dip="30.0" rake="90.0" />\n' # newxml += ' <nodalPlane probability="0.0625" strike="225.0" dip="30.0" rake="90.0" />\n' newxml += ' <nodalPlane probability="0.2" strike="270.0" dip="30.0" rake="90.0" />\n' # newxml += ' <nodalPlane probability="0.0625" strike="315.0" dip="30.0" rake="90.0" />\n' newxml += ' </nodalPlaneDist>\n' # set hypo depth newxml += ' <hypoDepthDist>\n' newxml += ' <hypoDepth probability="0.50" depth="'+str("%0.1f" % m['src_dep'][0])+'"/>\n' \ +' <hypoDepth probability="0.25" depth="'+str("%0.1f" % m['src_dep'][1])+'"/>\n' \ +' <hypoDepth probability="0.25" depth="'+str("%0.1f" % m['src_dep'][2])+'"/>\n' newxml += ' </hypoDepthDist>\n' if m['src_N0'][-1] == -99.0: newxml += ' </areaSource>\n' else: newxml += ' </areaSource>\n\n' 
####################################################################### # now make fault sources ####################################################################### elif m['src_type'] == 'fault': # rename source code if "." exists m['src_code'].replace('.', '') src_code = m['src_code'] print src_code if isinf(log10(m['src_N0'][0])) == False: ################################################################### # do complex faults ################################################################### if m['fault_dip'][0] != m['fault_dip'][1]: #if m['fault_dip'][0] >= 0: # catches all faults #if m['fault_dip'][0] > 0: # id subcript idsub = str("%0.1f" % beta2bval(m['src_beta'][0])) idsub = idsub.replace(".", "") newxml += ' <complexFaultSource id="'+src_code+idsub+'" name="'+\ m['src_name']+'" tectonicRegion="'+trt+'">\n' newxml += ' <complexFaultGeometry>\n' newxml += ' <faultTopEdge>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # calculate lat lons from surface projection # get upper h-dist upperhdist = m['src_dep'][0] / tan( radians(m['fault_dip'][0])) upperxy = get_line_parallels(m['src_shape'], upperhdist)[0] # make upper text xytxt = '' for xy in upperxy: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1]), str(m['src_dep'][0])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' </faultTopEdge>\n' newxml += ' <intermediateEdge>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # calculate lat lons from upper edge # get intermediate h-dist interhdist = (m['src_dep'][1] - m['src_dep'][0]) / tan( radians(m['fault_dip'][0])) interxy = get_line_parallels(upperxy, interhdist)[0] # make intermediate text xytxt = '' for xy in interxy: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1]), str(m['src_dep'][1])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' </intermediateEdge>\n' newxml += ' 
<faultBottomEdge>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # calculate lat lons from intermediate edge # get bottom h-dist bottomhdist = (m['src_dep'][2] - m['src_dep'][1]) / tan( radians(m['fault_dip'][1])) bottomxy = get_line_parallels(interxy, bottomhdist)[0] # make bottom text xytxt = '' for xy in bottomxy: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1]), str(m['src_dep'][2])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' </faultBottomEdge>\n' newxml += ' </complexFaultGeometry>\n' ''' # get fault area scaling model ''' #src_code = m['src_code'] if src_code.startswith('CIS'): newxml += ' <magScaleRel>GSCCascadia</magScaleRel>\n' elif src_code.startswith('WIN'): newxml += ' <magScaleRel>GSCOffshoreThrustsWIN</magScaleRel>\n' elif src_code.startswith('HGT'): newxml += ' <magScaleRel>GSCOffshoreThrustsHGT</magScaleRel>\n' elif src_code.startswith('QCSS') or src_code.startswith( 'FWF'): newxml += ' <magScaleRel>WC1994_QCSS</magScaleRel>\n' elif src_code.startswith('EISO'): newxml += ' <magScaleRel>GSCEISO</magScaleRel>\n' elif src_code.startswith('EISB'): newxml += ' <magScaleRel>GSCEISB</magScaleRel>\n' elif src_code.startswith('EISI'): newxml += ' <magScaleRel>GSCEISI</magScaleRel>\n' else: newxml += ' <magScaleRel>Leonard2014_SCR</magScaleRel>\n' #newxml += ' <magScaleRel>WC1994</magScaleRel>\n' newxml += ' <ruptAspectRatio>' + aspectratio + '</ruptAspectRatio>\n' ''' # now get appropriate MFD ''' # do incremental MFD if m['src_beta'][0] > -99: # adjust N0 value to account for weighting of fault sources octxt = make_collapse_occurrence_text( m, binwid, meta, mx_dict) # make text newxml += ' <incrementalMFD minMag="' + str( '%0.2f' % (m['min_mag'] + 0.5 * binwid) ) + '" binWidth="' + str(binwid) + '">\n' newxml += ' <occurRates>' + octxt + '</occurRates>\n' newxml += ' </incrementalMFD>\n' if m['fault_dip'][0] != 90.: newxml += ' <rake>90.0</rake>\n' else: newxml += ' 
<rake>0.0</rake>\n' newxml += ' </complexFaultSource>\n\n' ################################################################### # else do simple fault ################################################################### elif m['fault_dip'][0] == m['fault_dip'][1]: # id subcript idsub = str("%0.1f" % beta2bval(m['src_beta'][0])) idsub = idsub.replace(".", "") newxml += ' <simpleFaultSource id="'+m['src_code']+idsub+'" name="'+\ m['src_name']+'" tectonicRegion="'+m['trt']+'">\n' newxml += ' <simpleFaultGeometry>\n' newxml += ' <gml:LineString>\n' newxml += ' <gml:posList>\n' # simple fauls use surface projection! ''' # calculate lat lons from surface projection # get upper h-dist upperhdist = m['src_dep'][0] / tan(radians(m['fault_dip'][0])) upperxy = get_line_parallels(m['src_shape'], upperhdist)[0] ''' xytxt = '' for xy in m['src_shape']: xytxt += ' ' + \ ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1])))+'\n' newxml += xytxt newxml += ' </gml:posList>\n' newxml += ' </gml:LineString>\n' newxml += ' <dip>' + str( m['fault_dip'][0]) + '</dip>\n' newxml += ' <upperSeismoDepth>' + str( m['src_dep'][0]) + '</upperSeismoDepth>\n' newxml += ' <lowerSeismoDepth>' + str( m['src_dep'][-1]) + '</lowerSeismoDepth>\n' newxml += ' </simpleFaultGeometry>\n' ''' # get fault area scaling model ''' src_code = m['src_code'] if src_code == 'CIS': newxml += ' <magScaleRel>GSCCascadia</magScaleRel>\n' elif src_code.startswith('WIN'): newxml += ' <magScaleRel>GSCOffshoreThrustsWIN</magScaleRel>\n' elif src_code.startswith('HGT'): newxml += ' <magScaleRel>GSCOffshoreThrustsHGT</magScaleRel>\n' elif src_code.startswith('QCSS') or src_code.startswith( 'FWF'): newxml += ' <magScaleRel>WC1994_QCSS</magScaleRel>\n' elif src_code.startswith('EISO'): newxml += ' <magScaleRel>GSCEISO</magScaleRel>\n' elif src_code.startswith('EISB'): newxml += ' <magScaleRel>GSCEISB</magScaleRel>\n' elif src_code.startswith('EISI'): newxml += ' <magScaleRel>GSCEISI</magScaleRel>\n' else: newxml += ' 
<magScaleRel>Leonard2014_SCR</magScaleRel>\n' #newxml += ' <magScaleRel>WC1994</magScaleRel>\n' newxml += ' <ruptAspectRatio>' + aspectratio + '</ruptAspectRatio>\n' #newxml += ' <ruptAspectRatio>2.0</ruptAspectRatio>\n' ''' # now get appropriate MFD ''' # do incremental MFD if m['src_beta'][0] > -99: octxt = make_collapse_occurrence_text( m, binwid, meta, mx_dict) # make text newxml += ' <incrementalMFD minMag="' + str( '%0.2f' % (m['min_mag'] + 0.5 * binwid) ) + '" binWidth="' + str(binwid) + '">\n' newxml += ' <occurRates>' + octxt + '</occurRates>\n' newxml += ' </incrementalMFD>\n' if m['fault_dip'][0] != 90.: newxml += ' <rake>90.0</rake>\n' else: newxml += ' <rake>0.0</rake>\n' if m['src_N0'][-1] == -99.0: newxml += ' </simpleFaultSource>\n' else: newxml += ' </simpleFaultSource>\n\n' # comment sources with null activity rates if m['src_N0'][-1] == -99.0: newxml += ' -->\n\n' # finish nrml newxml += ' </sourceModel>\n' newxml += '</nrml>' # write Big BBOX print '\nBBOX:', bbminlon, bbminlat, ',', bbminlon, bbmaxlat, ',', bbmaxlon, bbmaxlat, ',', bbmaxlon, bbminlat # write new data to file outxml = path.join(meta['modelPath'], meta['modelFile']) #outxml = '/'.join((src_folder, ''.join((outbase,'_',bl,'_',ml,'.xml')))) f = open(outxml, 'w') f.write(newxml) f.close() #srcxmls.append(outxml) return outxml
def write_oq_sourcefile(model, meta, mx_dict):
    """Write a collapsed-rate OpenQuake NRML 0.4 source-model XML file.

    model   = a list of dictionaries, one per source (area or fault); keys used
              here include 'src_code', 'src_name', 'src_type', 'src_shape',
              'src_dep', 'src_usd', 'src_lsd', 'src_N0', 'src_beta',
              'fault_dip', 'class', 'min_mag', 'gmm_trt'
    meta    = run metadata dict; 'modelPath' and 'modelFile' locate the output
              file, 'doSeisTec' toggles inclusion of the Australian
              fault-source model
    mx_dict = Mmax information passed through to make_collapse_occurrence_text

    Returns the path of the XML file written.
    """
    from oq_tools import beta2bval, get_line_parallels
    from numpy import log10, max, min, tan, radians, isinf, floor
    from os import path

    # initialise the "big" bounding box so any real source will expand it
    bbmaxlon = -180
    bbmaxlat = -90
    bbminlon = 180
    bbminlat = 90

    # make xml header
    header = '<?xml version="1.0" encoding="utf-8"?>\n'
    header += '<nrml xmlns:gml="http://www.opengis.net/gml"\n'
    header += ' xmlns="http://openquake.org/xmlns/nrml/0.4">\n\n'

    outbase = path.split(meta['modelPath'])[-1]

    # start xml text
    newxml = header + ' <sourceModel name="' + outbase + '_collapsed">\n\n'

    # collect src codes (kept for parity with the original "rename if
    # duplicated" intent; currently only collected, never used)
    codes = []
    for m in model:
        codes.append(m['src_code'])

    # MFD bin width used by all incremental MFDs.  Hoisted out of the
    # area-source branch: originally binwid was only assigned there, so a
    # fault source appearing before any area source raised UnboundLocalError.
    binwid = 0.1

    # start loop thru sources
    for m in model:
        # set magnitude-scaling relation, rupture aspect ratio and minimum
        # magnitude from the source class
        if float(m['class']) <= 7.:
            magScaleRel = 'Leonard2014_SCR'
            ruptAspectRatio = 1.5  # balance between L14 and Cea14 surface rupture lengths
            min_mag = 4.5
        elif float(m['class']) == 8 or float(m['class']) == 9:
            magScaleRel = 'WC1994'
            ruptAspectRatio = 1.5
            min_mag = 5.5
        elif float(m['class']) == 10:
            magScaleRel = 'StrasserInterface'
            ruptAspectRatio = 1.5  # based on approx AH interface aspect ratios at Mw 8
            min_mag = 6.5
        elif floor(float(m['class'])) == 11:
            magScaleRel = 'StrasserIntraslab'
            ruptAspectRatio = 1.2  # based on approx AH intraslab aspect ratios at Mw 7.5
            min_mag = 5.5

        # open an XML comment around sources with null activity rates
        # (closed with "-->" at the bottom of this loop)
        if m['src_N0'][-1] == -99.0:
            newxml += ' <!--\n'

        #######################################################################
        # write area sources
        #######################################################################
        if m['src_type'] == 'area':
            # strip "." from source code.  Bug fix: the original called
            # replace() and discarded the result, so codes were never renamed.
            src_code = m['src_code'].replace('.', '')

            newxml += ' <areaSource id="'+src_code+'" name="'+\
                      m['src_name']+'" tectonicRegion="'+m['gmm_trt']+'">\n'
            newxml += ' <areaGeometry>\n'
            newxml += ' <gml:Polygon>\n'
            newxml += ' <gml:exterior>\n'
            newxml += ' <gml:LinearRing>\n'
            newxml += ' <gml:posList>\n'

            # build polygon text, skipping consecutive duplicate vertices
            polytxt = ''
            pp = 0
            for xy in m['src_shape'][:-1]:  # no need to close poly
                addPoint = True
                if pp > 0:
                    if xy[0] == xy0[0] and xy[1] == xy0[1]:
                        addPoint = False
                if addPoint == True:
                    polytxt = polytxt + ' ' + str("%0.4f" % xy[0]) \
                              + ' ' + str("%0.4f" % xy[1]) + '\n'
                xy0 = xy
                pp += 1

            newxml += polytxt
            newxml += ' </gml:posList>\n'
            newxml += ' </gml:LinearRing>\n'
            newxml += ' </gml:exterior>\n'
            newxml += ' </gml:Polygon>\n'

            ###################################################################
            # track the model bbox - not required for the nrml files, but
            # useful for setting up job.ini files
            buff = 0.1
            maxlon = max(m['src_shape'][:, 0]) + buff
            minlon = min(m['src_shape'][:, 0]) - buff
            maxlat = max(m['src_shape'][:, 1]) + buff
            minlat = min(m['src_shape'][:, 1]) - buff

            # grow big bbox
            if maxlon > bbmaxlon:
                bbmaxlon = maxlon
            if minlon < bbminlon:
                bbminlon = minlon
            if maxlat > bbmaxlat:
                bbmaxlat = maxlat
            if minlat < bbminlat:
                bbminlat = minlat

            ###################################################################
            # set depth distribution; warn when the first hypo depth falls
            # outside the seismogenic depth range
            if m['src_dep'][0] <= m['src_usd'] or m['src_dep'][0] >= m['src_lsd']:
                print(m['src_code'], 'FIX DEPTHS')

            newxml += ' <upperSeismoDepth>' + str(m['src_usd']) + '</upperSeismoDepth>\n'
            newxml += ' <lowerSeismoDepth>' + str(m['src_lsd']) + '</lowerSeismoDepth>\n'

            # close source geometry
            newxml += ' </areaGeometry>\n'
            newxml += ' <magScaleRel>' + magScaleRel + '</magScaleRel>\n'
            newxml += ' <ruptAspectRatio>' + str(ruptAspectRatio) + '</ruptAspectRatio>\n'

            # get weighted (collapsed) occurrence rates
            octxt = make_collapse_occurrence_text(m, min_mag, binwid, meta, mx_dict)

            newxml += ' <incrementalMFD minMag="' + str('%0.2f' % (min_mag + 0.5 * binwid)) \
                      + '" binWidth="' + str(binwid) + '">\n'
            newxml += ' <occurRates>' + octxt + '</occurRates>\n'
            newxml += ' </incrementalMFD>\n'

            # set nodal planes
            newxml += ' <nodalPlaneDist>\n'
            newxml += get_nodal_plane_text(m)
            newxml += ' </nodalPlaneDist>\n'

            # set hypocentral depths: three weighted depths when a second
            # depth is provided, otherwise a single depth with probability 1
            newxml += ' <hypoDepthDist>\n'
            if m['src_dep'][1] != -999.0:
                newxml += ' <hypoDepth probability="0.50" depth="'+str("%0.1f" % m['src_dep'][0])+'"/>\n' \
                        + ' <hypoDepth probability="0.25" depth="'+str("%0.1f" % m['src_dep'][1])+'"/>\n' \
                        + ' <hypoDepth probability="0.25" depth="'+str("%0.1f" % m['src_dep'][2])+'"/>\n'
            else:
                newxml += ' <hypoDepth probability="1.0" depth="' + str("%0.1f" % m['src_dep'][0]) + '"/>\n'
            newxml += ' </hypoDepthDist>\n'

            if m['src_N0'][-1] == -99.0:
                newxml += ' </areaSource>\n'
            else:
                newxml += ' </areaSource>\n\n'

        #######################################################################
        # now make fault sources
        #######################################################################
        elif m['src_type'] == 'fault':
            # strip "." from source code (same discarded-replace bug fix as
            # the area branch)
            src_code = m['src_code'].replace('.', '')

            # skip faults whose N0 yields an infinite log10 (i.e. N0 == 0)
            if isinf(log10(m['src_N0'][0])) == False:

                ###############################################################
                # do complex faults (dip changes down-dip)
                ###############################################################
                if m['fault_dip'][0] != m['fault_dip'][1]:
                    # id subscript from the source b-value
                    idsub = str("%0.1f" % beta2bval(m['src_beta'][0]))
                    idsub = idsub.replace(".", "")

                    newxml += ' <complexFaultSource id="'+src_code+idsub+'" name="'+\
                              m['src_name']+'" tectonicRegion="'+m['gmm_trt']+'">\n'
                    newxml += ' <complexFaultGeometry>\n'
                    newxml += ' <faultTopEdge>\n'
                    newxml += ' <gml:LineString>\n'
                    newxml += ' <gml:posList>\n'

                    # top edge: offset the surface trace down-dip to the
                    # upper depth
                    upperhdist = m['src_dep'][0] / tan(radians(m['fault_dip'][0]))
                    upperxy = get_line_parallels(m['src_shape'], upperhdist)[0]

                    xytxt = ''
                    for xy in upperxy:
                        xytxt += ' ' + \
                                 ' '.join((str('%0.4f' % xy[0]),
                                           str('%0.4f' % xy[1]),
                                           str(m['src_dep'][0])))+'\n'
                    newxml += xytxt
                    newxml += ' </gml:posList>\n'
                    newxml += ' </gml:LineString>\n'
                    newxml += ' </faultTopEdge>\n'
                    newxml += ' <intermediateEdge>\n'
                    newxml += ' <gml:LineString>\n'
                    newxml += ' <gml:posList>\n'

                    # intermediate edge: offset from the top edge using the
                    # upper dip
                    interhdist = (m['src_dep'][1] - m['src_dep'][0]) / tan(radians(m['fault_dip'][0]))
                    interxy = get_line_parallels(upperxy, interhdist)[0]

                    xytxt = ''
                    for xy in interxy:
                        xytxt += ' ' + \
                                 ' '.join((str('%0.4f' % xy[0]),
                                           str('%0.4f' % xy[1]),
                                           str(m['src_dep'][1])))+'\n'
                    newxml += xytxt
                    newxml += ' </gml:posList>\n'
                    newxml += ' </gml:LineString>\n'
                    newxml += ' </intermediateEdge>\n'
                    newxml += ' <faultBottomEdge>\n'
                    newxml += ' <gml:LineString>\n'
                    newxml += ' <gml:posList>\n'

                    # bottom edge: offset from the intermediate edge using
                    # the lower dip
                    bottomhdist = (m['src_dep'][2] - m['src_dep'][1]) / tan(radians(m['fault_dip'][1]))
                    bottomxy = get_line_parallels(interxy, bottomhdist)[0]

                    xytxt = ''
                    for xy in bottomxy:
                        xytxt += ' ' + \
                                 ' '.join((str('%0.4f' % xy[0]),
                                           str('%0.4f' % xy[1]),
                                           str(m['src_dep'][2])))+'\n'
                    newxml += xytxt
                    newxml += ' </gml:posList>\n'
                    newxml += ' </gml:LineString>\n'
                    newxml += ' </faultBottomEdge>\n'
                    newxml += ' </complexFaultGeometry>\n'

                    # fault-area scaling model keyed on source code
                    if src_code.startswith('CIS'):
                        newxml += ' <magScaleRel>GSCCascadia</magScaleRel>\n'
                    elif src_code.startswith('WIN'):
                        newxml += ' <magScaleRel>GSCOffshoreThrustsWIN</magScaleRel>\n'
                    elif src_code.startswith('HGT'):
                        newxml += ' <magScaleRel>GSCOffshoreThrustsHGT</magScaleRel>\n'
                    elif src_code.startswith('QCSS') or src_code.startswith('FWF'):
                        newxml += ' <magScaleRel>WC1994_QCSS</magScaleRel>\n'
                    elif src_code.startswith('EISO'):
                        newxml += ' <magScaleRel>GSCEISO</magScaleRel>\n'
                    elif src_code.startswith('EISB'):
                        newxml += ' <magScaleRel>GSCEISB</magScaleRel>\n'
                    elif src_code.startswith('EISI'):
                        newxml += ' <magScaleRel>GSCEISI</magScaleRel>\n'
                    else:
                        newxml += ' <magScaleRel>' + magScaleRel + '</magScaleRel>\n'

                    newxml += ' <ruptAspectRatio>' + str(ruptAspectRatio) + '</ruptAspectRatio>\n'

                    # incremental MFD (only when a real b-value exists)
                    if m['src_beta'][0] > -99:
                        octxt = make_collapse_occurrence_text(m, min_mag, binwid, meta, mx_dict)

                        # NOTE(review): rates above use the class-based
                        # min_mag while minMag below uses m['min_mag'] -
                        # confirm these agree for fault sources
                        newxml += ' <incrementalMFD minMag="' + str('%0.2f' % (m['min_mag'] + 0.5 * binwid)) \
                                  + '" binWidth="' + str(binwid) + '">\n'
                        newxml += ' <occurRates>' + octxt + '</occurRates>\n'
                        newxml += ' </incrementalMFD>\n'

                    # non-vertical faults assigned reverse rake; vertical
                    # faults assigned strike-slip
                    if m['fault_dip'][0] != 90.:
                        newxml += ' <rake>90.0</rake>\n'
                    else:
                        newxml += ' <rake>0.0</rake>\n'

                    newxml += ' </complexFaultSource>\n\n'

                ###############################################################
                # else do simple fault (constant dip)
                ###############################################################
                elif m['fault_dip'][0] == m['fault_dip'][1]:
                    # id subscript from the source b-value
                    idsub = str("%0.1f" % beta2bval(m['src_beta'][0]))
                    idsub = idsub.replace(".", "")

                    newxml += ' <simpleFaultSource id="'+src_code+idsub+'" name="'+\
                              m['src_name']+'" tectonicRegion="'+m['gmm_trt']+'">\n'
                    newxml += ' <simpleFaultGeometry>\n'
                    newxml += ' <gml:LineString>\n'
                    newxml += ' <gml:posList>\n'

                    # simple faults use the surface projection directly
                    xytxt = ''
                    for xy in m['src_shape']:
                        xytxt += ' ' + \
                                 ' '.join((str('%0.4f' % xy[0]), str('%0.4f' % xy[1])))+'\n'
                    newxml += xytxt
                    newxml += ' </gml:posList>\n'
                    newxml += ' </gml:LineString>\n'
                    newxml += ' <dip>' + str(m['fault_dip'][0]) + '</dip>\n'
                    newxml += ' <upperSeismoDepth>' + str(m['src_dep'][0]) + '</upperSeismoDepth>\n'
                    newxml += ' <lowerSeismoDepth>' + str(m['src_dep'][-1]) + '</lowerSeismoDepth>\n'
                    newxml += ' </simpleFaultGeometry>\n'

                    # fault-area scaling model keyed on source code.
                    # NOTE(review): exact match for 'CIS' here vs
                    # startswith('CIS') in the complex branch - confirm
                    # intentional
                    if src_code == 'CIS':
                        newxml += ' <magScaleRel>GSCCascadia</magScaleRel>\n'
                    elif src_code.startswith('WIN'):
                        newxml += ' <magScaleRel>GSCOffshoreThrustsWIN</magScaleRel>\n'
                    elif src_code.startswith('HGT'):
                        newxml += ' <magScaleRel>GSCOffshoreThrustsHGT</magScaleRel>\n'
                    elif src_code.startswith('QCSS') or src_code.startswith('FWF'):
                        newxml += ' <magScaleRel>WC1994_QCSS</magScaleRel>\n'
                    elif src_code.startswith('EISO'):
                        newxml += ' <magScaleRel>GSCEISO</magScaleRel>\n'
                    elif src_code.startswith('EISB'):
                        newxml += ' <magScaleRel>GSCEISB</magScaleRel>\n'
                    elif src_code.startswith('EISI'):
                        newxml += ' <magScaleRel>GSCEISI</magScaleRel>\n'
                    else:
                        newxml += ' <magScaleRel>' + magScaleRel + '</magScaleRel>\n'

                    newxml += ' <ruptAspectRatio>' + str(ruptAspectRatio) + '</ruptAspectRatio>\n'

                    # incremental MFD (only when a real b-value exists)
                    if m['src_beta'][0] > -99:
                        octxt = make_collapse_occurrence_text(m, min_mag, binwid, meta, mx_dict)

                        newxml += ' <incrementalMFD minMag="' + str('%0.2f' % (m['min_mag'] + 0.5 * binwid)) \
                                  + '" binWidth="' + str(binwid) + '">\n'
                        newxml += ' <occurRates>' + octxt + '</occurRates>\n'
                        newxml += ' </incrementalMFD>\n'

                    # non-vertical faults assigned reverse rake; vertical
                    # faults assigned strike-slip
                    if m['fault_dip'][0] != 90.:
                        newxml += ' <rake>90.0</rake>\n'
                    else:
                        newxml += ' <rake>0.0</rake>\n'

                    if m['src_N0'][-1] == -99.0:
                        newxml += ' </simpleFaultSource>\n'
                    else:
                        newxml += ' </simpleFaultSource>\n\n'

        # close the XML comment for sources with null activity rates
        if m['src_N0'][-1] == -99.0:
            newxml += ' -->\n\n'

    ######################################################################
    # add Australian fault-source model
    ######################################################################
    if meta['doSeisTec'] == True:
        aust_fault_file = path.join('..', 'faults',
                                    'National_Fault_Source_Model_2018_Collapsed_NSHA13',
                                    'National_Fault_Source_Model_2018_Collapsed_NSHA13_all_methods_collapsed_inc_cluster_gmm_trt.xml')
        # slice off the nrml header/footer lines so the sources nest inside
        # this model's <sourceModel> element
        with open(aust_fault_file) as ff:
            for line in ff.readlines()[3:-2]:
                newxml += ' ' + line

    ######################################################################
    # add indonesia-png area and fault-source model
    ######################################################################
    indo_png_source_file = path.join('2018_mw', 'Java_Banda_PNG', 'input',
                                     'collapsed', 'Java_Banda_PNG_collapsed.xml')
    with open(indo_png_source_file) as ff:
        for line in ff.readlines()[4:-2]:
            newxml += line

    # finish nrml
    newxml += ' </sourceModel>\n'
    newxml += '</nrml>'

    # write new data to file (with-block fixes the original's unclosed
    # file handles)
    outxml = path.join(meta['modelPath'], meta['modelFile'])
    with open(outxml, 'w') as f:
        f.write(newxml)

    return outxml