def fault_normal_from_shp(shpfile, fault, dip, ztor):
    from numpy import tan, radians
    import shapefile
    from mapping_tools import get_field_data, get_line_parallels

    # read shapefile with fault length and slip rate
    sf = shapefile.Reader(shpfile)
    fcode = get_field_data(sf, 'CODE', 'str')
    shapes = sf.shapes()

    # get down-dip range: surface projection to top of rupture
    rng = ztor / tan(radians(dip))

    for i in range(0, len(fcode)):
        if fcode[i] == fault:
            pts = shapes[i].points
            posazpts, negazpts = get_line_parallels(pts, rng)

            # return on first match so we never reach the end of the loop
            # with posazpts/negazpts undefined
            return posazpts, negazpts
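# A minimal usage sketch for fault_normal_from_shp; the shapefile path, fault
# code, dip, and depth-to-top-of-rupture values below are hypothetical
# placeholders, not values from the source.
if __name__ == '__main__':
    posaz, negaz = fault_normal_from_shp('shapefiles/faults.shp',  # assumed path
                                         'DMF', dip=45., ztor=5.)  # assumed values
    print(posaz[0], negaz[0])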
#ev_type = dictlist2array(nshacat, 'ev_type')
lat = dictlist2array(nshacat, 'lat')
lon = dictlist2array(nshacat, 'lon')

datelim = dt(1900, 1, 1)
#delidx = where((evdt < datelim) | (lon < 135.))[0]

# read shapefile
shpfile = 'shapefile/australia_ml_regions.shp'
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

ml_reg = get_field_data(sf, 'ML_REGION', 'str')

# loop through events and keep those in EA zone
idx = []
i = 0
for la, lo, ed in zip(lat, lon, evdt):
    for poly, reg in zip(polygons, ml_reg):
        if reg == 'EA' or reg == 'SA': # or reg == 'WCA':
            pt = Point(lo, la)
            if pt.within(poly) and ed >= datelim:
                idx.append(i)
    i += 1

# delete events
#mx_orig = delete(mx_orig, delidx)
#mx_rev_ml = delete(mx_rev_ml, delidx)
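# A minimal sketch of applying the kept-event indices collected above;
# filtering with 'idx' (rather than the commented-out delete of 'delidx')
# is an assumption here, not code from the source.
from numpy import array
keep = array(idx, dtype=int)
lat = lat[keep]
lon = lon[keep]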
1979-06-02 - Cadoux
1970-03-10 - Calingiri
1968-10-14 - Meckering
'''
###############################################################################
# parse shapefile and make shapely objects
###############################################################################

shpfile = path.join('data', 'iso_p_ASCMM.shp')
#shpfile = path.join('data', 'iso_p_ASCMM_test.shp')

print('Reading source shapefile...')
sf = shapefile.Reader(shpfile)

# get data fields
mmi = get_field_data(sf, 'INTERP_MMI', 'float')
mmilon = get_field_data(sf, 'IP_LONG', 'float')
mmilat = get_field_data(sf, 'IP_LAT', 'float')
mmisc = get_field_data(sf, 'WII_NEHRP', 'str')
eqname = get_field_data(sf, 'EQ_NAME', 'str')
eqlon = get_field_data(sf, 'EQ_LONG', 'float')
eqlat = get_field_data(sf, 'EQ_LAT', 'float')
eqdep = get_field_data(sf, 'DEPTH_KM', 'float')
eqdate = get_field_data(sf, 'EQ_DATE', 'str')
eqtime = get_field_data(sf, 'EQ_TIME', 'str')

eqdt = []
eqdtstr = []

# get datetime
for eqd, eqt in zip(eqdate, eqtime):
    # assumed date/time string formats - adjust to the shapefile's actual fields
    eqdt.append(datetime.strptime(eqd + ' ' + eqt, '%Y-%m-%d %H:%M:%S'))
    eqdtstr.append(eqdt[-1].strftime('%Y%m%d%H%M'))
def parse_shp_attributes(shpfile):
    print('Reading source shapefile...')
    sf = shapefile.Reader(shpfile)

    shapes = sf.shapes()
    polygons = []
    polygonsCopy = []
    for poly in shapes:
        polygons.append(Polygon(poly.points))
        polygonsCopy.append(Polygon(poly.points))

    # get input arrays from shapefile
    src_code = get_field_data(sf, 'CODE', 'str')
    src_name = get_field_data(sf, 'SRC_NAME', 'str')
    src_class = get_field_data(sf, 'CLASS', 'float')
    src_rte_adj = get_field_data(sf, 'RTE_ADJ_F', 'float')
    src_usd = get_field_data(sf, 'USD', 'float')
    src_lsd = get_field_data(sf, 'LSD', 'float')
    src_overwrite_lsd = get_field_data(sf, 'OW_LSD', 'float')
    src_mmin = get_field_data(sf, 'MIN_MAG', 'float')
    src_mmin_reg = get_field_data(sf, 'MIN_RMAG', 'float')
    src_mmax = get_field_data(sf, 'MMAX_BEST', 'float')
    src_mmax_u = get_field_data(sf, 'MMAX_UPPER', 'float')
    src_mmax_l = get_field_data(sf, 'MMAX_LOWER', 'float')
    src_bval = get_field_data(sf, 'BVAL_BEST', 'float')
    src_bval_u = get_field_data(sf, 'BVAL_UPPER', 'float')
    src_bval_l = get_field_data(sf, 'BVAL_LOWER', 'float')
    src_n0 = get_field_data(sf, 'N0_BEST', 'float')
    src_n0_u = get_field_data(sf, 'N0_UPPER', 'float')
    src_n0_l = get_field_data(sf, 'N0_LOWER', 'float')
    src_bval_fix = get_field_data(sf, 'BVAL_FIX', 'float')
    src_bval_fix_sd = get_field_data(sf, 'BVAL_FIX_S', 'float') # field name truncated to 10 chars - trailing "D" not recognised
    src_mcomp = get_field_data(sf, 'MCOMP', 'str')
    src_ycomp = get_field_data(sf, 'YCOMP', 'str')
    src_shmax = get_field_data(sf, 'SHMAX', 'float')
    src_shm_sig = get_field_data(sf, 'SHMAX_SIG', 'float')
    src_ymax = get_field_data(sf, 'CAT_YMAX', 'float')
    src_cat = get_field_data(sf, 'CAT_FILE', 'str')

    sortind = argsort(src_code)

    return src_code, src_bval, src_n0, src_class, sf
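# A minimal usage sketch for parse_shp_attributes; the shapefile path below is
# a hypothetical placeholder.
src_code, src_bval, src_n0, src_class, sf = parse_shp_attributes('shapefiles/source_model.shp')
print(src_code[0], src_bval[0], src_n0[0])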
def get_mfds(mvect, mxvect, tvect, dec_tvect, ev_dict, mcomps, ycomps, ymax, mrng, src_mmax, \
             src_mmin_reg, src_bval_fix, src_bval_fix_sd, bin_width, poly):

    # remove incomplete events based on original preferred magnitudes (mxvect)
    mvect, mxvect, tvect, dec_tvect, ev_dict, out_idx, ev_out = \
        remove_incomplete_events(mvect, mxvect, tvect, dec_tvect, ev_dict, mcomps, ycomps, bin_width)

    # get annualised rates using preferred MW (mvect)
    cum_rates, cum_num, bin_rates, n_obs, n_yrs = \
        get_annualised_rates(mcomps, ycomps, mvect, mrng, bin_width, ymax)

    ###############################################################################
    # calculate MFDs if at least 50 events
    ###############################################################################

    # get index of minimum regression magnitude and valid magnitude bins
    diff_cum = abs(hstack((diff(cum_rates), 0.)))
    midx = where((mrng >= src_mmin_reg - bin_width / 2.) & (isfinite(diff_cum)))[0]

    # if midx is empty, take the highest non-zero magnitude bin
    if len(midx) == 0:
        midx = [where(isfinite(diff_cum))[0][-1]]

    # make sure there are enough observations for the b-value calculation
    if len(midx) < 5:
        idxstart = midx[0] - 1
        while idxstart >= 0 and len(midx) < 5:
            # if the number of observations is greater than zero, add to midx
            if n_obs[idxstart] > 0:
                midx = hstack((idxstart, midx))
                print(' get lower mag T', midx)
            idxstart -= 1

    # first, check if using a fixed b-value, and fit the curve to solve for N0
    if src_bval_fix > 0:
        print(' Using fixed b-value =', src_bval_fix, src_bval_fix_sd)

        # set source beta
        bval = src_bval_fix
        beta = bval2beta(bval)
        sigb = src_bval_fix_sd
        sigbeta = bval2beta(sigb)

        # get dummy curve
        dummyN0 = 1.
        m_min_reg = src_mmin_reg + bin_width / 2.
        bc_tmp, bc_mrng = get_oq_incrementalMFD(beta, dummyN0, m_min_reg, src_mmax, bin_width)

        # fit to lowest magnitude considered
        bc_lo100 = cum_rates[midx][0] * (bc_tmp / bc_tmp[0])

        # scale for N0
        fn0 = 10**(log10(bc_lo100[0]) + beta2bval(beta) * bc_mrng[0])

    # do Aki maximum likelihood for zones with 50-79 events
    elif len(mvect) >= 50 and len(mvect) < 80:
        # do Aki maximum likelihood
        bval, sigb = aki_maximum_likelihood(mrng[midx] + bin_width / 2, n_obs[midx], 0.) # assumes completeness taken care of
        beta = bval2beta(bval)
        sigbeta = bval2beta(sigb)

        # now recalculate N0
        dummyN0 = 1.
        bc_tmp, bc_mrng = get_oq_incrementalMFD(beta, dummyN0, mrng[0], src_mmax, bin_width)

        # fit to lowest magnitude considered and observed
        Nminmag = cum_rates[midx][0] * (bc_tmp / bc_tmp[0])

        # !!!!!! check into why this must be done - I suspect it may be that there is an Mmax eq in the zones !!!!
        fidx = midx[0]

        # solve for N0
        fn0 = 10**(log10(Nminmag[0]) + bval * bc_mrng[fidx])

        print(' Aki ML b-value =', bval, sigb)

    # do Weichert for zones with more events
    elif len(mvect) >= 80:
        # calculate Weichert
        bval, sigb, a_m, siga_m, fn0, stdfn0 = weichert_algorithm(array(n_yrs[midx]), \
            mrng[midx] + bin_width / 2, n_obs[midx], mrate=0.0, \
            bval=1.1, itstab=1E-4, maxiter=1000)

        beta = bval2beta(bval)
        sigbeta = bval2beta(sigb)

        print(' Weichert b-value = ', bval, sigb)

    ###############################################################################
    # calculate MFDs using NSHA13_Background if fewer than 50 events
    ###############################################################################

    else:
        print('Getting b-value from NSHA Background...')
        # set b-value to NaN
        bval = nan

        # load Leonard zones
        lsf = shapefile.Reader(path.join('shapefiles', 'NSHA13_Background',
                                         'NSHA13_Background_NSHA18_MFD.shp'))

        # get Leonard polygons
        l08_shapes = lsf.shapes()

        # get Leonard b-values
        lbval = get_field_data(lsf, 'BVAL_BEST', 'str')

        # get centroid of current poly
        clon, clat = get_shapely_centroid(poly)
        point = Point(clon, clat)

        # loop through zones and find point in poly
        for zone_bval, l_shape in zip(lbval, l08_shapes):
            l_poly = Polygon(l_shape.points)

            # check if the Leonard centroid is in the domain's polygon
            if point.within(l_poly):
                bval = float(zone_bval)

        # for those odd sites outside of the L08 bounds, assign a default b-value
        if isnan(bval):
            bval = 0.85

        beta = bval2beta(bval)
        sigb = 0.1
        sigbeta = bval2beta(sigb)

        # solve for N0
        fn0 = fit_a_value(bval, mrng, cum_rates, src_mmax, bin_width, midx)

        print(' Leonard2008 b-value =', bval, sigb)

    # get confidence intervals
    err_up, err_lo = get_confidence_intervals(n_obs, cum_rates)

    return bval, beta, sigb, sigbeta, fn0, cum_rates, ev_out, err_up, err_lo
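# A minimal sketch of the b-value/beta conversion the code above relies on;
# this assumes the standard Gutenberg-Richter relation beta = b * ln(10),
# which may differ from the project's actual bval2beta implementation.
from numpy import log

def bval2beta_sketch(bval):
    return bval * log(10.)   # e.g. b = 1.0 -> beta ~ 2.303

def beta2bval_sketch(beta):
    return beta / log(10.)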
###############################################################################
# parse shapefile and make shapely objects
###############################################################################

print('Reading source shapefile...')
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
polygons = []
polygonsCopy = []
for poly in shapes:
    polygons.append(Polygon(poly.points))
    polygonsCopy.append(Polygon(poly.points))

# get input arrays from shapefile
src_code = get_field_data(sf, 'CODE', 'str')
src_name = get_field_data(sf, 'SRC_NAME', 'str')
src_class = get_field_data(sf, 'CLASS', 'str')
src_rte_adj = get_field_data(sf, 'RTE_ADJ_F', 'float')
src_usd = get_field_data(sf, 'USD', 'float')
src_lsd = get_field_data(sf, 'LSD', 'float')
src_overwrite_lsd = get_field_data(sf, 'OW_LSD', 'float')
src_mmin = get_field_data(sf, 'MIN_MAG', 'float')
src_mmin_reg = get_field_data(sf, 'MIN_RMAG', 'float')
src_mmax = get_field_data(sf, 'MMAX_BEST', 'float')
src_mmax_u = get_field_data(sf, 'MMAX_UPPER', 'float')
src_mmax_l = get_field_data(sf, 'MMAX_LOWER', 'float')
src_bval = get_field_data(sf, 'BVAL_BEST', 'float')
src_bval_u = get_field_data(sf, 'BVAL_UPPER', 'float')
src_bval_l = get_field_data(sf, 'BVAL_LOWER', 'float')
src_n0 = get_field_data(sf, 'N0_BEST', 'float')
import shapefile
from mapping_tools import get_field_data, distance, reckon, get_line_parallels
from fault_tools import mag2wid_L10, mag2rupwid_WC94
from numpy import radians, cos, sin

shpfile = 'simple_aus_ruptures.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(shpfile)

ids = get_field_data(sf, 'ID', 'str')
mw = get_field_data(sf, 'MW', 'float')
dip = get_field_data(sf, 'DIP', 'float')

shapes = sf.shapes()

for i, shape in enumerate(shapes):
    #rupwid = mag2wid_L10(mw[i], 'scr')
    rupwid = mag2rupwid_WC94(mw[i], 'rs')

    # project down-dip rupture width to horizontal range and rupture depth
    rngkm = rupwid * cos(radians(dip[i]))
    rupdep = rupwid * sin(radians(dip[i]))

    posazpts, negazpts = get_line_parallels(shape.points, rngkm)

    ftxt = '\t'.join((str('%0.3f' % shape.points[0][0]), str('%0.3f' % shape.points[0][1]), '0.0')) + '\n'
    ftxt += '\t'.join((str('%0.3f' % shape.points[1][0]), str('%0.3f' % shape.points[1][1]), '0.0')) + '\n\n'
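# A quick numeric check of the down-dip projection above (the values are
# illustrative, not from the source): a 10 km wide rupture dipping 30 degrees.
assert abs(10. * cos(radians(30.)) - 8.66) < 0.01  # horizontal extent (km)
assert abs(10. * sin(radians(30.)) - 5.0) < 1e-6   # vertical extent (km)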
#shpfile = '/Users/tallen/Documents/Geoscience_Australia/NSHA2018/source_models/zones/shapefiles/ARUP_Background/ARUP_background_source_model.shp'
shpfile = '/Users/tallen/Documents/Geoscience_Australia/NSHA2018/source_models/zones/shapefiles/NSHA13/NSHA13_regional_source_model_simplified.shp'
sfz = shapefile.Reader(shpfile)
drawshapepoly(m, plt, sfz, col='blue', lw=1.75)

# label shapes
#labelpolygon(m, plt, sf, 'NAME', fweight='normal', fsize=16, addOutline=True)

##########################################################################################
# plot stress vectors
##########################################################################################

shpfile = '/Users/tallen/Documents/Geoscience_Australia/NSHA2018/source_models/zones/shapefiles/Other/SHMax_Rajabi_2016.shp'
sfv = shapefile.Reader(shpfile)
lat = get_field_data(sfv, 'LAT', 'float')
lon = get_field_data(sfv, 'LON', 'float')
shmax = get_field_data(sfv, 'SHMAX', 'float')

##########################################################################################
# annotate arrows
##########################################################################################

# test to make sure arrows are in the right spot
#x, y = m(lon, lat)
#m.plot(x, y, 'r.')

xm, ym = m(lon, lat)
#dx = ones_like(x) * 1000.
#dy = ones_like(y) * 1000.

for x, y, sh in zip(xm, ym, shmax):
    alen = 60000
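    # A minimal sketch of turning an SHmax azimuth into arrow components for
    # the loop above; the sin/cos decomposition and plt.arrow call below are
    # assumptions, not code from the source (assumes numpy sin/cos/radians
    # are imported).
    dx = alen * sin(radians(sh))  # east offset in map units
    dy = alen * cos(radians(sh))  # north offset in map units
    plt.arrow(x - dx / 2., y - dy / 2., dx, dy, fc='k', ec='k', lw=0.5)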
       y, 'h', mfc='b', mec='w', mew=0.75, markersize=9, label='Large Dams')

##########################################################################################
# add simple faults
##########################################################################################

nfsmshp = '//nas//gemd//ehp//georisk_earthquake//neotectonic//Seismicity_Scenario_models//Hazard Map working 2018//ARCGIS//FSM lines//FSD_simple_faults.shp'
sf = shapefile.Reader(nfsmshp)

shapes = sf.shapes()
records = sf.records()

lt_rates = get_field_data(sf, 'SL_RT_LT', 'float') # long-term slip rate
lt_rates = array(lt_rates)
st_rates = get_field_data(sf, 'SL_RT_ST', 'float') # short-term slip rate
st_rates = array(st_rates)

if cwd.startswith('/nas'):
    cptfile = '//nas//gemd//ehp//georisk_earthquake//hazard//DATA//cpt//temperature.cpt'

ncols = 18
cmap, zvals = cpt2colormap(cptfile, ncols + 1, rev=False)
cmap = remove_last_cmap_colour(cmap)
#cmap = mpl.cm.jet
cs = (cmap(arange(cmap.N)))

minslip = 0.
maxslip = 160.
###############################################################################
# parse AUS6 shapefile exported from MIF
###############################################################################

ausshp = 'AUS6_Zones.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(ausshp)

shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get source name
src_name = get_field_data(sf, 'Name', 'str')

###############################################################################
# parse AUS6 lookup csv
###############################################################################

auscsv = '20160526_AUS6_Zones.csv'

mmin = []
mmax = []
name = []
code = []

lines = open(auscsv).readlines()[1:]
for line in lines:
    dat = line.strip().split(',')
shpfolder = path.split(shpfile)

###############################################################################
# parse shapefile and make shapely objects
###############################################################################

print('Reading source shapefile...')
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get input arrays from shapefile
src_code = get_field_data(sf, 'CODE', 'str')
src_name = get_field_data(sf, 'SRC_NAME', 'str')
src_mmin = get_field_data(sf, 'MIN_MAG', 'float')
src_mmin_reg = get_field_data(sf, 'MIN_RMAG', 'float')
src_mmax = get_field_data(sf, 'MMAX_BEST', 'float')
src_mmax_u = get_field_data(sf, 'MMAX_UPPER', 'float')
src_mmax_l = get_field_data(sf, 'MMAX_LOWER', 'float')
src_bval = get_field_data(sf, 'BVAL_BEST', 'float')
src_bval_u = get_field_data(sf, 'BVAL_UPPER', 'float')
src_bval_l = get_field_data(sf, 'BVAL_LOWER', 'float')
src_n0 = get_field_data(sf, 'N0_BEST', 'float')
src_n0_u = get_field_data(sf, 'N0_UPPER', 'float')
src_n0_l = get_field_data(sf, 'N0_LOWER', 'float')
src_bval_fix = get_field_data(sf, 'BVAL_FIX', 'float')
src_bval_fix_sd = get_field_data(sf, 'BVAL_FIX_S', 'float') # field name truncated to 10 chars - trailing "D" not recognised
# loop through shapefiles
###############################################################################

for deg in degs:
    # load shapefile
    #shppath = path.join(basepath, 'Gridded_'+deg+'D', 'shapefiles', 'Gridded_'+deg+'D_NSHA18.shp')
    shppath = path.join(basepath, 'Radial_'+deg+'D', 'shapefiles', 'Radial_'+deg+'D_NSHA18.shp')
    sf = shapefile.Reader(shppath)
    print(shppath)

    # get shapes
    shapes = sf.shapes()

    # get b-value data
    shp_bvals = get_field_data(sf, 'BVAL_BEST', 'float')
    shp_bvals_low = get_field_data(sf, 'BVAL_LOWER', 'float')

    # loop through points
    for i in range(0, len(bvals)):
        poly_bvals = []
        poly_sigma = []
        in_poly = []

        point = Point(glons[i], glats[i])
        for shape, shp_b, shp_bs in zip(shapes, shp_bvals, shp_bvals_low):
            poly = Polygon(shape.points)

            # check if point is in polygon
            if point.within(poly) or point.touches(poly):
                poly_bvals.append(shp_b)
                poly_sigma.append(shp_bs)
from numpy import array, arange, log10, exp, around, sin, pi, nan, radians, where
from fault_tools import *
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import operator
from os import path, sep
import shapefile
from mapping_tools import get_field_data
#from matplotlib import mpl

# read shapefile with fault length and slip rate
root = path.split('shapefiles//DMF_LRF.shp')[0]
shpfile = 'shapefiles//DMF_LRF.shp'
sf = shapefile.Reader(shpfile)

fname = get_field_data(sf, 'FAULT_NAME', 'str')
fcode = get_field_data(sf, 'CODE', 'str')
flen = get_field_data(sf, 'LENGTH', 'float')
fdip = get_field_data(sf, 'DIP', 'float')
fwt = get_field_data(sf, 'WEIGHT', 'float')
slipbest = get_field_data(sf, 'SLIP_RATE', 'float')
slipmin = get_field_data(sf, 'SLIP_MIN', 'float')
slipmax = get_field_data(sf, 'SLIP_MAX', 'float')

# get characteristic magnitude from Wells & Coppersmith 1994
mchar = []
sig = []
area = []
seis_thickness = 15.
for i in range(0, len(flen)):
    '''
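# A minimal sketch of the kind of Wells & Coppersmith (1994) scaling the loop
# above likely applies; the all-slip-type surface-rupture-length regression
# M = 5.08 + 1.16*log10(L), sigma = 0.28, is used here as an illustrative
# stand-in for the project's fault_tools functions.
def wc94_mchar_sketch(length_km):
    # characteristic magnitude from surface rupture length (km)
    return 5.08 + 1.16 * log10(length_km)

# e.g. a 50 km rupture gives M ~ 7.05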
# add domains
##########################################################################################

import matplotlib.patheffects as PathEffects
path_effects = [PathEffects.withStroke(linewidth=2.5, foreground="w")]

cptfile = '//Users//trev//Documents//DATA//GMT//cpt//keshet.cpt' # qual-dark-06.cpt keshet.cpt #aurora.cpt #cosam.cpt #Set1_06.cpt
ncols = 5
cmap, zvals = cpt2colormap(cptfile, ncols + 1)
cmap = remove_last_cmap_colour(cmap)
cs = (cmap(arange(ncols)))

shpfile = 'shapefiles/adj_neotectonic_domains.shp'
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
trts = get_field_data(sf, 'trt', 'str')
utrts = unique(trts)

labels = ['Precambrian Craton', 'Reactivated Proterozoic Crust', 'Extended Continental Crust', \
          'Phanerozoic Accretionary Crust', 'Passive Margin']

for c, utrt, label in zip(cs, utrts, labels):
    labeLegend = True
    for shape, trt in zip(shapes, trts):
        if trt == utrt:
            x = []
            y = []

            p = 0
            parts = shape.parts
            parts.append(len(shape.points))
#from obspy.arclink.client import Client
#from obspy.core import read
from os import path, getcwd
import shapefile
from shapely.geometry import Point, Polygon
from mapping_tools import get_field_data

# read shapefile
shpfile = 'shapefiles/nac_gmm_zones.shp'
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

zone_code = get_field_data(sf, 'CODE', 'str')

# parse catalogue
evdict = parse_usgs_events(usgscsv)
#a=b # kill

from obspy.io.xseed import Parser

# read dataless SEED volumes
print('Reading dataless seed volumes...')
if getcwd().startswith('/nas'):
    au_parser = Parser('/nas/active/ops/community_safety/ehp/georisk_earthquake/hazard/Networks/AU/AU.IRIS.dataless')
from mpl_toolkits.basemap import Basemap
from numpy import array, arange, mean, percentile, unique, where, ones_like, zeros_like, sin, radians
import matplotlib.pyplot as plt
from mapping_tools import drawoneshapepoly, distance, reckon
import shapefile
from mapping_tools import get_field_data
from fault_tools import *

cnrs = [-124.4, -121.6, 48, 49]
mapres = 'h'
grdsize = .4

# read shapefile
shpfile = '//Users//tallen//Documents//2020_National_Hazard//2020_gsc_nshm//sources//fault_sources//shapefiles//DMF_LRF.shp'
sf = shapefile.Reader(shpfile)

fcode = get_field_data(sf, 'CODE', 'str')
flen = get_field_data(sf, 'LENGTH', 'float')
seis_thickness = 15.
dip = 70.

fig = plt.figure(1, figsize=(15, 15))

llcrnrlon = cnrs[0]
urcrnrlon = cnrs[1]
llcrnrlat = cnrs[2]
urcrnrlat = cnrs[3]

lon_0 = mean([llcrnrlon, urcrnrlon])
lat_1 = percentile([llcrnrlat, urcrnrlat], 25)
lat_2 = percentile([llcrnrlat, urcrnrlat], 75)
from misc_tools import savitzky_golay
import scipy.odr.odrpack as odrpack
from scipy.stats import linregress
import pickle

print('Loading pkl file...')
stdict = pickle.load(open("stdict.pkl", "rb"))

shpfile = 'shapefiles/2021_nac_gmm_zones.shp'
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

zone_code = get_field_data(sf, 'CODE', 'str')
zone_group = get_field_data(sf, 'ZONE_GROUP', 'str')

# loop through zones
i = 0
vs30 = []
res_stack = []

print('Getting residuals...')
idx = []
for poly, zcode, zgroup in zip(polygons, zone_code, zone_group):
    for i, sd in enumerate(stdict):
        pt = Point(sd['eqlo'], sd['eqla'])

        if zgroup == 'BS':
            mmin = 5.25
shpfolder = path.split(shpfile)

###############################################################################
# parse shapefile and make shapely objects
###############################################################################

print('Reading source shapefile...')
sf = shapefile.Reader(shpfile)

shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get input arrays from shapefile
src_code = get_field_data(sf, 'CODE', 'str')
src_name = get_field_data(sf, 'SRC_NAME', 'str')
src_mmin = get_field_data(sf, 'MIN_MAG', 'float')
src_mmin_reg = get_field_data(sf, 'MIN_RMAG', 'float')
src_mmax = get_field_data(sf, 'MMAX_BEST', 'float')
src_mmax_u = get_field_data(sf, 'MMAX_UPPER', 'float')
src_mmax_l = get_field_data(sf, 'MMAX_LOWER', 'float')
src_beta = get_field_data(sf, 'BETA_BEST', 'float')
src_beta_u = get_field_data(sf, 'BETA_UPPER', 'float')
src_beta_l = get_field_data(sf, 'BETA_LOWER', 'float')
src_n0 = get_field_data(sf, 'N0_BEST', 'float')
src_n0_u = get_field_data(sf, 'N0_UPPER', 'float')
src_n0_l = get_field_data(sf, 'N0_LOWER', 'float')
src_beta_fix = get_field_data(sf, 'BETA_FIX', 'float')
src_beta_fix_sd = get_field_data(sf, 'BETA_FIX_S', 'float') # field name truncated to 10 chars - trailing "D" not recognised